var/home/core/zuul-output/logs/kubelet.log
Oct 01 15:04:49 crc systemd[1]: Starting Kubernetes Kubelet... Oct 01 15:04:49 crc restorecon[4733]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized 
by admin to system_u:object_r:container_file_t:s0:c225,c458 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c4,c24 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c574,c582 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c138,c778 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Oct 01 15:04:49 crc restorecon[4733]: 
/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 
Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 01 15:04:49 crc restorecon[4733]: 
/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c84,c419 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 01 15:04:49 crc restorecon[4733]: 
/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c108,c511 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c442,c857 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:49 crc restorecon[4733]: 
/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c12,c18 Oct 01 15:04:49 crc restorecon[4733]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c0,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 01 15:04:50 crc 
restorecon[4733]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 01 15:04:50 crc 
restorecon[4733]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc 
restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc 
restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 01 15:04:50 
crc restorecon[4733]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 01 
15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c5,c6 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 
15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 15:04:50 crc 
restorecon[4733]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c37,c572 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 
15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 
15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc 
restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 01 15:04:50 crc restorecon[4733]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 01 15:04:50 crc restorecon[4733]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 01 15:04:50 crc restorecon[4733]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0 Oct 01 15:04:51 crc kubenswrapper[4869]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Oct 01 15:04:51 crc kubenswrapper[4869]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version. Oct 01 15:04:51 crc kubenswrapper[4869]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Oct 01 15:04:51 crc kubenswrapper[4869]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. 
Oct 01 15:04:51 crc kubenswrapper[4869]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI. Oct 01 15:04:51 crc kubenswrapper[4869]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.295068 4869 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime" Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.298834 4869 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.298859 4869 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.298866 4869 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.298873 4869 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.298880 4869 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.298885 4869 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.298892 4869 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.298898 4869 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.298904 4869 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.298910 4869 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.298916 4869 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.298921 4869 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.298926 4869 feature_gate.go:330] unrecognized feature gate: GatewayAPI Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.298931 4869 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.298958 4869 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.298963 4869 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.298969 4869 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.298976 4869 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.298983 4869 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.298990 4869 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.298995 4869 feature_gate.go:330] unrecognized feature gate: OVNObservability Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.299001 4869 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.299006 4869 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.299013 4869 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.299020 4869 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.299026 4869 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.299032 4869 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.299037 4869 feature_gate.go:330] unrecognized feature gate: PinnedImages Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.299042 4869 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.299047 4869 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.299055 4869 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.299060 4869 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.299065 4869 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.299070 4869 feature_gate.go:330] unrecognized feature gate: NewOLM Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.299076 4869 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.299082 4869 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.299088 4869 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.299093 4869 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.299098 4869 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.299105 4869 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.299111 4869 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.299116 4869 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.299123 4869 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.299128 4869 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.299133 4869 feature_gate.go:330] unrecognized feature gate: PlatformOperators Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.299138 4869 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.299143 4869 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.299148 4869 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.299153 4869 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.299158 4869 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.299164 4869 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.299171 4869 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.299177 4869 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.299183 4869 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.299188 4869 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.299194 4869 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.299199 4869 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.299205 4869 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.299209 4869 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.299216 4869 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.299221 4869 feature_gate.go:330] unrecognized feature gate: InsightsConfig Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.299226 4869 feature_gate.go:330] unrecognized feature gate: Example Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.299232 4869 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.299237 4869 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.299242 4869 feature_gate.go:330] unrecognized feature gate: SignatureStores Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.299247 4869 feature_gate.go:330] 
unrecognized feature gate: ClusterAPIInstall Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.299302 4869 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.299307 4869 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.299313 4869 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.299318 4869 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.299323 4869 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.299431 4869 flags.go:64] FLAG: --address="0.0.0.0" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.299445 4869 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.299455 4869 flags.go:64] FLAG: --anonymous-auth="true" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.299464 4869 flags.go:64] FLAG: --application-metrics-count-limit="100" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.299472 4869 flags.go:64] FLAG: --authentication-token-webhook="false" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.299478 4869 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.299487 4869 flags.go:64] FLAG: --authorization-mode="AlwaysAllow" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.299494 4869 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.299501 4869 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.299507 4869 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.299514 4869 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.299520 4869 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.299526 4869 flags.go:64] FLAG: --cgroup-driver="cgroupfs" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.299533 4869 flags.go:64] FLAG: --cgroup-root="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.299538 4869 flags.go:64] FLAG: --cgroups-per-qos="true" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.299545 4869 flags.go:64] FLAG: --client-ca-file="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.299552 4869 flags.go:64] FLAG: --cloud-config="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.299557 4869 flags.go:64] FLAG: --cloud-provider="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.299563 4869 flags.go:64] FLAG: --cluster-dns="[]" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.299571 4869 flags.go:64] FLAG: --cluster-domain="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.299576 4869 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.299583 4869 flags.go:64] FLAG: --config-dir="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.299589 4869 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json" Oct 01 15:04:51 crc 
kubenswrapper[4869]: I1001 15:04:51.299595 4869 flags.go:64] FLAG: --container-log-max-files="5" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.299603 4869 flags.go:64] FLAG: --container-log-max-size="10Mi" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.299609 4869 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.299615 4869 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.299621 4869 flags.go:64] FLAG: --containerd-namespace="k8s.io" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.299627 4869 flags.go:64] FLAG: --contention-profiling="false" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.299633 4869 flags.go:64] FLAG: --cpu-cfs-quota="true" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.299639 4869 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.299645 4869 flags.go:64] FLAG: --cpu-manager-policy="none" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.299651 4869 flags.go:64] FLAG: --cpu-manager-policy-options="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.299658 4869 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.299664 4869 flags.go:64] FLAG: --enable-controller-attach-detach="true" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.299671 4869 flags.go:64] FLAG: --enable-debugging-handlers="true" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.299677 4869 flags.go:64] FLAG: --enable-load-reader="false" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.299683 4869 flags.go:64] FLAG: --enable-server="true" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.299689 4869 flags.go:64] FLAG: --enforce-node-allocatable="[pods]" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.299696 4869 flags.go:64] FLAG: --event-burst="100" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.299702 4869 flags.go:64] FLAG: --event-qps="50" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.299708 4869 flags.go:64] FLAG: --event-storage-age-limit="default=0" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.299714 4869 flags.go:64] FLAG: --event-storage-event-limit="default=0" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.299720 4869 flags.go:64] FLAG: --eviction-hard="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.299727 4869 flags.go:64] FLAG: --eviction-max-pod-grace-period="0" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.299733 4869 flags.go:64] FLAG: --eviction-minimum-reclaim="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.299739 4869 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.299746 4869 flags.go:64] FLAG: --eviction-soft="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.299752 4869 flags.go:64] FLAG: --eviction-soft-grace-period="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.299758 4869 flags.go:64] FLAG: --exit-on-lock-contention="false" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.299764 4869 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.299770 4869 flags.go:64] FLAG: --experimental-mounter-path="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.299776 4869 flags.go:64] FLAG: 
--fail-cgroupv1="false" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.299782 4869 flags.go:64] FLAG: --fail-swap-on="true" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.299788 4869 flags.go:64] FLAG: --feature-gates="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.299796 4869 flags.go:64] FLAG: --file-check-frequency="20s" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.299802 4869 flags.go:64] FLAG: --global-housekeeping-interval="1m0s" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.299808 4869 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.299814 4869 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.299820 4869 flags.go:64] FLAG: --healthz-port="10248" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.299826 4869 flags.go:64] FLAG: --help="false" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.299832 4869 flags.go:64] FLAG: --hostname-override="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.299838 4869 flags.go:64] FLAG: --housekeeping-interval="10s" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.299844 4869 flags.go:64] FLAG: --http-check-frequency="20s" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.299850 4869 flags.go:64] FLAG: --image-credential-provider-bin-dir="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.299856 4869 flags.go:64] FLAG: --image-credential-provider-config="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.299862 4869 flags.go:64] FLAG: --image-gc-high-threshold="85" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.299869 4869 flags.go:64] FLAG: --image-gc-low-threshold="80" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.299875 4869 flags.go:64] FLAG: --image-service-endpoint="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.299881 4869 flags.go:64] FLAG: --kernel-memcg-notification="false" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.299887 4869 flags.go:64] FLAG: --kube-api-burst="100" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.299893 4869 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.299899 4869 flags.go:64] FLAG: --kube-api-qps="50" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.299905 4869 flags.go:64] FLAG: --kube-reserved="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.299911 4869 flags.go:64] FLAG: --kube-reserved-cgroup="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.299916 4869 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.299923 4869 flags.go:64] FLAG: --kubelet-cgroups="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.299928 4869 flags.go:64] FLAG: --local-storage-capacity-isolation="true" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.299934 4869 flags.go:64] FLAG: --lock-file="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.299940 4869 flags.go:64] FLAG: --log-cadvisor-usage="false" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.299946 4869 flags.go:64] FLAG: --log-flush-frequency="5s" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.299952 4869 flags.go:64] FLAG: --log-json-info-buffer-size="0" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.299961 4869 flags.go:64] FLAG: --log-json-split-stream="false" Oct 01 15:04:51 crc 
kubenswrapper[4869]: I1001 15:04:51.299967 4869 flags.go:64] FLAG: --log-text-info-buffer-size="0" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.299973 4869 flags.go:64] FLAG: --log-text-split-stream="false" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.299979 4869 flags.go:64] FLAG: --logging-format="text" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.299984 4869 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.299993 4869 flags.go:64] FLAG: --make-iptables-util-chains="true" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.299999 4869 flags.go:64] FLAG: --manifest-url="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.300005 4869 flags.go:64] FLAG: --manifest-url-header="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.300013 4869 flags.go:64] FLAG: --max-housekeeping-interval="15s" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.300019 4869 flags.go:64] FLAG: --max-open-files="1000000" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.300026 4869 flags.go:64] FLAG: --max-pods="110" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.300032 4869 flags.go:64] FLAG: --maximum-dead-containers="-1" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.300038 4869 flags.go:64] FLAG: --maximum-dead-containers-per-container="1" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.300045 4869 flags.go:64] FLAG: --memory-manager-policy="None" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.300050 4869 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.300057 4869 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.300063 4869 flags.go:64] FLAG: --node-ip="192.168.126.11" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.300069 4869 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.300082 4869 flags.go:64] FLAG: --node-status-max-images="50" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.300088 4869 flags.go:64] FLAG: --node-status-update-frequency="10s" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.300094 4869 flags.go:64] FLAG: --oom-score-adj="-999" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.300100 4869 flags.go:64] FLAG: --pod-cidr="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.300106 4869 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.300116 4869 flags.go:64] FLAG: --pod-manifest-path="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.300122 4869 flags.go:64] FLAG: --pod-max-pids="-1" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.300127 4869 flags.go:64] FLAG: --pods-per-core="0" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.300133 4869 flags.go:64] FLAG: --port="10250" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.300140 4869 flags.go:64] FLAG: --protect-kernel-defaults="false" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.300145 4869 flags.go:64] FLAG: --provider-id="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.300151 4869 flags.go:64] FLAG: --qos-reserved="" Oct 01 15:04:51 crc 
kubenswrapper[4869]: I1001 15:04:51.300157 4869 flags.go:64] FLAG: --read-only-port="10255" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.300163 4869 flags.go:64] FLAG: --register-node="true" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.300169 4869 flags.go:64] FLAG: --register-schedulable="true" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.300175 4869 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.300184 4869 flags.go:64] FLAG: --registry-burst="10" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.300190 4869 flags.go:64] FLAG: --registry-qps="5" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.300196 4869 flags.go:64] FLAG: --reserved-cpus="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.300202 4869 flags.go:64] FLAG: --reserved-memory="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.300210 4869 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.300215 4869 flags.go:64] FLAG: --root-dir="/var/lib/kubelet" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.300221 4869 flags.go:64] FLAG: --rotate-certificates="false" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.300228 4869 flags.go:64] FLAG: --rotate-server-certificates="false" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.300234 4869 flags.go:64] FLAG: --runonce="false" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.300240 4869 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.300246 4869 flags.go:64] FLAG: --runtime-request-timeout="2m0s" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.300252 4869 flags.go:64] FLAG: --seccomp-default="false" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.300285 4869 flags.go:64] FLAG: --serialize-image-pulls="true" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.300295 4869 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.300302 4869 flags.go:64] FLAG: --storage-driver-db="cadvisor" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.300308 4869 flags.go:64] FLAG: --storage-driver-host="localhost:8086" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.300314 4869 flags.go:64] FLAG: --storage-driver-password="root" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.300321 4869 flags.go:64] FLAG: --storage-driver-secure="false" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.300327 4869 flags.go:64] FLAG: --storage-driver-table="stats" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.300333 4869 flags.go:64] FLAG: --storage-driver-user="root" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.300340 4869 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.300346 4869 flags.go:64] FLAG: --sync-frequency="1m0s" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.300353 4869 flags.go:64] FLAG: --system-cgroups="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.300358 4869 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.300368 4869 flags.go:64] FLAG: --system-reserved-cgroup="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.300374 4869 flags.go:64] FLAG: --tls-cert-file="" Oct 01 15:04:51 
crc kubenswrapper[4869]: I1001 15:04:51.300379 4869 flags.go:64] FLAG: --tls-cipher-suites="[]" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.300387 4869 flags.go:64] FLAG: --tls-min-version="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.300393 4869 flags.go:64] FLAG: --tls-private-key-file="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.300399 4869 flags.go:64] FLAG: --topology-manager-policy="none" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.300406 4869 flags.go:64] FLAG: --topology-manager-policy-options="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.300412 4869 flags.go:64] FLAG: --topology-manager-scope="container" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.300419 4869 flags.go:64] FLAG: --v="2" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.300428 4869 flags.go:64] FLAG: --version="false" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.300435 4869 flags.go:64] FLAG: --vmodule="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.300442 4869 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.300449 4869 flags.go:64] FLAG: --volume-stats-agg-period="1m0s" Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.300581 4869 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.300588 4869 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.300594 4869 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.300601 4869 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.300608 4869 feature_gate.go:330] unrecognized feature gate: NewOLM Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.300614 4869 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.300621 4869 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.300627 4869 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.300634 4869 feature_gate.go:330] unrecognized feature gate: OVNObservability Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.300639 4869 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.300645 4869 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.300650 4869 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.300655 4869 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.300662 4869 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.300669 4869 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.300675 4869 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.300681 4869 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.300688 4869 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.300693 4869 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.300699 4869 feature_gate.go:330] unrecognized feature gate: PinnedImages Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.300704 4869 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.300709 4869 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.300714 4869 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.300720 4869 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.300726 4869 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.300731 4869 feature_gate.go:330] unrecognized feature gate: SignatureStores Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.300736 4869 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.300741 4869 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.300746 4869 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.300752 4869 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.300757 4869 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.300762 4869 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.300767 4869 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.300772 4869 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.300777 4869 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.300783 4869 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.300788 4869 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.300793 4869 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.300798 4869 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.300803 4869 feature_gate.go:330] unrecognized feature gate: 
ClusterAPIInstall Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.300811 4869 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.300816 4869 feature_gate.go:330] unrecognized feature gate: InsightsConfig Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.300823 4869 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.300830 4869 feature_gate.go:330] unrecognized feature gate: Example Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.300835 4869 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.300841 4869 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.300846 4869 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.300851 4869 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.300856 4869 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.300861 4869 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.300866 4869 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.300872 4869 feature_gate.go:330] unrecognized feature gate: GatewayAPI Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.300877 4869 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.300882 4869 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.300887 4869 feature_gate.go:330] unrecognized feature gate: PlatformOperators Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.300894 4869 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.300901 4869 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.300906 4869 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.300912 4869 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.300918 4869 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.300923 4869 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.300929 4869 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.300935 4869 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.300940 4869 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.300945 4869 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.300951 4869 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.300956 4869 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.300961 4869 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.300966 4869 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.300971 4869 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.300977 4869 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.301766 4869 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.312564 4869 server.go:491] "Kubelet version" kubeletVersion="v1.31.5" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.312611 4869 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK="" Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.312749 4869 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.312764 4869 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.312773 4869 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.312782 4869 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.312790 4869 feature_gate.go:330] unrecognized feature 
gate: MultiArchInstallGCP Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.312802 4869 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.312815 4869 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.312825 4869 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.312834 4869 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.312843 4869 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.312851 4869 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.312859 4869 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.312867 4869 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.312877 4869 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.312885 4869 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.312895 4869 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.312905 4869 feature_gate.go:330] unrecognized feature gate: PinnedImages Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.312917 4869 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.312926 4869 feature_gate.go:330] unrecognized feature gate: PlatformOperators Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.312934 4869 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.312942 4869 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.312951 4869 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.312959 4869 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.312966 4869 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.312974 4869 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.312982 4869 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.312991 4869 feature_gate.go:330] unrecognized feature gate: SignatureStores Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.312999 4869 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.313009 4869 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.313017 4869 feature_gate.go:330] unrecognized feature 
gate: VSphereMultiNetworks Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.313025 4869 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.313033 4869 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.313042 4869 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.313049 4869 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.313057 4869 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.313065 4869 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.313072 4869 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.313080 4869 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.313088 4869 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.313096 4869 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.313103 4869 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.313111 4869 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.313119 4869 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.313126 4869 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.313134 4869 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.313141 4869 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.313149 4869 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.313157 4869 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.313165 4869 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.313173 4869 feature_gate.go:330] unrecognized feature gate: InsightsConfig Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.313180 4869 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.313188 4869 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.313196 4869 feature_gate.go:330] unrecognized feature gate: OVNObservability Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.313205 4869 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.313213 4869 feature_gate.go:330] unrecognized feature gate: NewOLM Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.313221 4869 feature_gate.go:330] 
unrecognized feature gate: SetEIPForNLBIngressController Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.313229 4869 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.313237 4869 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.313245 4869 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.313255 4869 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.313326 4869 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.313336 4869 feature_gate.go:330] unrecognized feature gate: GatewayAPI Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.313346 4869 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.313356 4869 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.313365 4869 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.313373 4869 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.313384 4869 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.313393 4869 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.313401 4869 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.313409 4869 feature_gate.go:330] unrecognized feature gate: Example Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.313417 4869 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.313431 4869 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.313653 4869 feature_gate.go:330] unrecognized feature gate: PinnedImages Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.313665 4869 feature_gate.go:330] unrecognized feature gate: NewOLM Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.313674 4869 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.313683 4869 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.313692 4869 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.313701 4869 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Oct 
01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.313711 4869 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.313725 4869 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.313733 4869 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.313742 4869 feature_gate.go:330] unrecognized feature gate: SignatureStores Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.313751 4869 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.313761 4869 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.313769 4869 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.313777 4869 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.313785 4869 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.313793 4869 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.313801 4869 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.313809 4869 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.313817 4869 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.313825 4869 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.313832 4869 feature_gate.go:330] unrecognized feature gate: Example Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.313840 4869 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.313848 4869 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.313855 4869 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.313863 4869 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.313871 4869 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.313881 4869 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.313891 4869 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.313900 4869 feature_gate.go:330] unrecognized feature gate: OVNObservability Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.313908 4869 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.313917 4869 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.313927 4869 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.313937 4869 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.313945 4869 feature_gate.go:330] unrecognized feature gate: InsightsConfig Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.313954 4869 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.313963 4869 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.313970 4869 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.313979 4869 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.313986 4869 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.313994 4869 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.314002 4869 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.314009 4869 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.314018 4869 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.314025 4869 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.314033 4869 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.314040 4869 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.314048 4869 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.314056 4869 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.314063 4869 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.314071 4869 feature_gate.go:330] unrecognized feature gate: GatewayAPI Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.314078 4869 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.314085 4869 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.314094 4869 
feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.314105 4869 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.314114 4869 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.314123 4869 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.314130 4869 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.314138 4869 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.314147 4869 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.314154 4869 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.314162 4869 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.314169 4869 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.314177 4869 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.314185 4869 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.314192 4869 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.314200 4869 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.314207 4869 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.314215 4869 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.314223 4869 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.314230 4869 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.314238 4869 feature_gate.go:330] unrecognized feature gate: PlatformOperators Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.314251 4869 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.315488 4869 server.go:940] "Client rotation is on, will bootstrap in background" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.323641 4869 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.323813 4869 certificate_store.go:130] Loading cert/key pair from 
"/var/lib/kubelet/pki/kubelet-client-current.pem". Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.325714 4869 server.go:997] "Starting client certificate rotation" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.325762 4869 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.326038 4869 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2025-12-27 15:52:23.617288421 +0000 UTC Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.326187 4869 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 2088h47m32.291107709s for next certificate rotation Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.355340 4869 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.358800 4869 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.376863 4869 log.go:25] "Validated CRI v1 runtime API" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.415102 4869 log.go:25] "Validated CRI v1 image API" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.419072 4869 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.428222 4869 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2025-10-01-15-00-26-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3] Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.428304 4869 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:42 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:43 fsType:tmpfs blockSize:0}] Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.457212 4869 manager.go:217] Machine: {Timestamp:2025-10-01 15:04:51.453320522 +0000 UTC m=+0.600163708 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2800000 MemoryCapacity:33654128640 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:17824854-cecc-4ad3-9cc5-d2cb904f9411 BootID:019e6235-8a6c-4363-92b0-b87baa5a55f8 Filesystems:[{Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:16827064320 Type:vfs Inodes:4108170 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:6730825728 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827064320 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} 
{Device:/run/user/1000 DeviceMajor:0 DeviceMinor:42 Capacity:3365412864 Type:vfs Inodes:821634 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:43 Capacity:1073741824 Type:vfs Inodes:4108170 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:35:7e:30 Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:35:7e:30 Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:e1:bd:b1 Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:77:71:07 Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:96:36:87 Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:bc:26:d3 Speed:-1 Mtu:1496} {Name:ens7.23 MacAddress:52:54:00:b4:97:56 Speed:-1 Mtu:1496} {Name:eth10 MacAddress:da:3e:13:9f:c8:e7 Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:d6:a3:f1:9b:bb:3d Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654128640 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:32768 Type:Data Level:1} {Id:10 Size:32768 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:32768 Type:Data Level:1} {Id:11 Size:32768 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:32768 Type:Data Level:1} {Id:8 Size:32768 Type:Instruction Level:1} 
{Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 BookID: DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:32768 Type:Data Level:1} {Id:9 Size:32768 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None} Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.457749 4869 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available. Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.457944 4869 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:} Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.458502 4869 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.458848 4869 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[] Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.458917 4869 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2} Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.459341 4869 topology_manager.go:138] "Creating topology manager with none policy" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.459362 4869 container_manager_linux.go:303] "Creating device plugin manager" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.459895 4869 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.459934 4869 server.go:66] 
"Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.460737 4869 state_mem.go:36] "Initialized new in-memory state store" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.460909 4869 server.go:1245] "Using root directory" path="/var/lib/kubelet" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.465435 4869 kubelet.go:418] "Attempting to sync node with API server" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.465476 4869 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.465560 4869 file.go:69] "Watching path" path="/etc/kubernetes/manifests" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.465585 4869 kubelet.go:324] "Adding apiserver pod source" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.465606 4869 apiserver.go:42] "Waiting for node sync before watching apiserver pods" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.473730 4869 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.475237 4869 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem". Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.478416 4869 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.30:6443: connect: connection refused Oct 01 15:04:51 crc kubenswrapper[4869]: E1001 15:04:51.478579 4869 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.30:6443: connect: connection refused" logger="UnhandledError" Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.478742 4869 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.30:6443: connect: connection refused Oct 01 15:04:51 crc kubenswrapper[4869]: E1001 15:04:51.478969 4869 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.30:6443: connect: connection refused" logger="UnhandledError" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.479570 4869 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.481457 4869 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.481511 4869 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.481531 4869 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.481550 
4869 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.481581 4869 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.481598 4869 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.481616 4869 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.481644 4869 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.481665 4869 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.481682 4869 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.481703 4869 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.481717 4869 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.482951 4869 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.483754 4869 server.go:1280] "Started kubelet" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.484957 4869 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.30:6443: connect: connection refused Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.485193 4869 server.go:163] "Starting to listen" address="0.0.0.0" port=10250 Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.485211 4869 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10 Oct 01 15:04:51 crc systemd[1]: Started Kubernetes Kubelet. 
Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.486323 4869 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.487168 4869 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.487214 4869 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.487706 4869 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-19 05:35:05.374956005 +0000 UTC Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.487941 4869 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 1166h30m13.887025899s for next certificate rotation Oct 01 15:04:51 crc kubenswrapper[4869]: E1001 15:04:51.488039 4869 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.487907 4869 desired_state_of_world_populator.go:146] "Desired state populator starts to run" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.487893 4869 volume_manager.go:287] "The desired_state_of_world populator starts" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.488164 4869 volume_manager.go:289] "Starting Kubelet Volume Manager" Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.488524 4869 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.30:6443: connect: connection refused Oct 01 15:04:51 crc kubenswrapper[4869]: E1001 15:04:51.488593 4869 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.30:6443: connect: connection refused" logger="UnhandledError" Oct 01 15:04:51 crc kubenswrapper[4869]: E1001 15:04:51.489658 4869 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.30:6443: connect: connection refused" interval="200ms" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.491687 4869 factory.go:153] Registering CRI-O factory Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.492479 4869 factory.go:221] Registration of the crio container factory successfully Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.492732 4869 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.492760 4869 factory.go:55] Registering systemd factory Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.492778 4869 factory.go:221] Registration of the systemd container factory successfully Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.492816 4869 factory.go:103] Registering Raw factory Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.492839 4869 manager.go:1196] Started watching for 
new ooms in manager Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.493763 4869 manager.go:319] Starting recovery of all containers Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.498040 4869 server.go:460] "Adding debug handlers to kubelet server" Oct 01 15:04:51 crc kubenswrapper[4869]: E1001 15:04:51.514996 4869 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.30:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.186a664e6e2fb4e2 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-10-01 15:04:51.483702498 +0000 UTC m=+0.630545654,LastTimestamp:2025-10-01 15:04:51.483702498 +0000 UTC m=+0.630545654,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.525177 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.525542 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.525707 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.525836 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.525948 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.526064 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.526187 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.526382 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.526579 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.526751 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.526909 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.527075 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.527299 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.527530 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.527718 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.527875 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.528053 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.528849 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.529025 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.529196 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.529424 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.529582 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.529715 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.529832 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.529947 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.530066 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.530232 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.530407 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.530551 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.530672 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" 
volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.530788 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.530901 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.531018 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.531141 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.531304 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.531441 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.531558 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.531685 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.531818 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.532013 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.532143 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" 
volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.532288 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.532428 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.532542 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.532678 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.532797 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.532925 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.533041 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.533167 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.533335 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.533483 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.533616 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.533764 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.533904 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.534048 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.534213 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.535771 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.535971 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.536125 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.536350 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.536527 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.536698 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.536868 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" 
volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.537025 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.537191 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.537452 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.537635 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.537803 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.537961 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.538123 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.538332 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.538488 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.538609 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.538724 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" 
volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.538844 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.538992 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.539115 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.539229 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.539405 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.539570 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.539734 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.539957 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.540128 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.540325 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.540486 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" 
volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.540616 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.540738 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.540855 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.540969 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.541085 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.541207 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.541231 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.541250 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.541303 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.541322 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.541342 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" 
volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.541360 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.541377 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.541392 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.541409 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.541425 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.541442 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.541458 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.541474 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.541497 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.541516 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.541537 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.541555 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.541574 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.541592 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.541611 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.541629 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.541648 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.541664 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.541680 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.541695 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.541710 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.541726 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.541739 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.541754 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.541769 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.541785 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.541802 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.541815 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.541831 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.541845 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.541859 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.541875 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.541889 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" 
volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.541905 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.541920 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.541933 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.541947 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.541960 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.541974 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.541988 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.542003 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.542017 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.542030 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.542042 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" 
volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.542055 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.542070 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.542087 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.542103 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.542116 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.542133 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.542152 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.542171 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.542211 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.542227 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.542243 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" 
volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.542256 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.542317 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.542330 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.542344 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.542383 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.542403 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.542420 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.542442 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.542458 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.542472 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.542486 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" 
volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.542500 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.542536 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.542549 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.542562 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.542576 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.542589 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.542603 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.542617 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.542631 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.542644 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.542657 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" 
volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.542670 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.542686 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.542687 4869 manager.go:324] Recovery completed Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.542707 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.542828 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.542871 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.542901 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.542929 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.542955 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.543001 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.543036 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.543062 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.543094 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.543123 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.543160 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.546150 4869 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.546215 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.546242 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.546300 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.546327 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.546353 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.546384 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" 
seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.546410 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.546454 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.546482 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.546508 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.546536 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.546564 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.546591 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.546617 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.546668 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.546701 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.546728 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext="" Oct 01 15:04:51 
crc kubenswrapper[4869]: I1001 15:04:51.546758 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.546820 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.546861 4869 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext="" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.546885 4869 reconstruct.go:97] "Volume reconstruction finished" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.546904 4869 reconciler.go:26] "Reconciler: start to sync state" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.557205 4869 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.559485 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.560123 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.561838 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.563353 4869 cpu_manager.go:225] "Starting CPU manager" policy="none" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.563381 4869 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.563411 4869 state_mem.go:36] "Initialized new in-memory state store" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.577551 4869 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.579540 4869 kubelet_network_linux.go:50] "Initialized iptables rules." 
protocol="IPv6" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.579603 4869 status_manager.go:217] "Starting to sync pod status with apiserver" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.579662 4869 kubelet.go:2335] "Starting kubelet main sync loop" Oct 01 15:04:51 crc kubenswrapper[4869]: E1001 15:04:51.579738 4869 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Oct 01 15:04:51 crc kubenswrapper[4869]: W1001 15:04:51.581131 4869 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.30:6443: connect: connection refused Oct 01 15:04:51 crc kubenswrapper[4869]: E1001 15:04:51.581237 4869 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.30:6443: connect: connection refused" logger="UnhandledError" Oct 01 15:04:51 crc kubenswrapper[4869]: E1001 15:04:51.588253 4869 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.592093 4869 policy_none.go:49] "None policy: Start" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.593018 4869 memory_manager.go:170] "Starting memorymanager" policy="None" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.593045 4869 state_mem.go:35] "Initializing new in-memory state store" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.655960 4869 manager.go:334] "Starting Device Plugin manager" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.656476 4869 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.656499 4869 server.go:79] "Starting device plugin registration server" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.657108 4869 eviction_manager.go:189] "Eviction manager: starting control loop" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.657128 4869 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.657375 4869 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.657537 4869 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.657552 4869 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Oct 01 15:04:51 crc kubenswrapper[4869]: E1001 15:04:51.668701 4869 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.680847 4869 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc","openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc"] Oct 01 15:04:51 crc kubenswrapper[4869]: 
I1001 15:04:51.680954 4869 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.682661 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.682725 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.682737 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.682875 4869 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.683413 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.683500 4869 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.684105 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.684197 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.684217 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.686506 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.686572 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.686596 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.690338 4869 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.690445 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.690496 4869 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 15:04:51 crc kubenswrapper[4869]: E1001 15:04:51.691348 4869 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.30:6443: connect: connection refused" interval="400ms" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.692014 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.692053 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.692075 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.692364 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.692424 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.692444 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.692560 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.692705 4869 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.692446 4869 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.694300 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.694357 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.694384 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.694763 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.694790 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.694811 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.694931 4869 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.695061 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.695116 4869 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.696314 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.696354 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.696567 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.696693 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.696732 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.696807 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.697046 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.697109 4869 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.698029 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.698399 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.698449 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.749214 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.749283 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.749315 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.749341 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.757693 4869 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.759084 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.759128 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.759141 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.759180 4869 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 01 15:04:51 crc kubenswrapper[4869]: E1001 15:04:51.759837 4869 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.30:6443: connect: connection refused" node="crc" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.850426 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.850503 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.850596 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.850705 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.850753 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.850797 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " 
pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.850933 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.850971 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.851008 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.851043 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.851079 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.851097 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.851112 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.851170 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.851184 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " 
pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.851203 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.851219 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.851205 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.851289 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.953231 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.953363 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.953400 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.953431 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.953439 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.953541 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " 
pod="openshift-etcd/etcd-crc" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.953551 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.953463 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.953598 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.953579 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.953632 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.953614 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.953717 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.953759 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.953801 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.953829 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" 
(UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.953833 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.953856 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.953877 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.953883 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.953888 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.953965 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.960595 4869 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.962304 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.962357 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.962378 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 15:04:51 crc kubenswrapper[4869]: I1001 15:04:51.962413 4869 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 01 15:04:51 crc kubenswrapper[4869]: E1001 15:04:51.963149 4869 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.30:6443: connect: connection refused" node="crc" Oct 01 15:04:52 crc kubenswrapper[4869]: I1001 15:04:52.013330 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 01 15:04:52 crc kubenswrapper[4869]: I1001 15:04:52.034683 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 01 15:04:52 crc kubenswrapper[4869]: I1001 15:04:52.052506 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Oct 01 15:04:52 crc kubenswrapper[4869]: I1001 15:04:52.060638 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 01 15:04:52 crc kubenswrapper[4869]: I1001 15:04:52.066980 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 01 15:04:52 crc kubenswrapper[4869]: W1001 15:04:52.068174 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-9d7a48b227d4483899725698f80930fd8aced4a908b5e231b4266f2d46acb0ac WatchSource:0}: Error finding container 9d7a48b227d4483899725698f80930fd8aced4a908b5e231b4266f2d46acb0ac: Status 404 returned error can't find the container with id 9d7a48b227d4483899725698f80930fd8aced4a908b5e231b4266f2d46acb0ac Oct 01 15:04:52 crc kubenswrapper[4869]: W1001 15:04:52.083394 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-758f2f35480c7c9253f596b2c0e7761ae9c09672641585bb28852bff2122facd WatchSource:0}: Error finding container 758f2f35480c7c9253f596b2c0e7761ae9c09672641585bb28852bff2122facd: Status 404 returned error can't find the container with id 758f2f35480c7c9253f596b2c0e7761ae9c09672641585bb28852bff2122facd Oct 01 15:04:52 crc kubenswrapper[4869]: E1001 15:04:52.092675 4869 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.30:6443: connect: connection refused" interval="800ms" Oct 01 15:04:52 crc kubenswrapper[4869]: W1001 15:04:52.094612 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-f31ea0cc85a983b63f65f12825bfe40afe5f2767f866aa387017ad179f4ba6d2 WatchSource:0}: Error finding container f31ea0cc85a983b63f65f12825bfe40afe5f2767f866aa387017ad179f4ba6d2: Status 404 returned error can't find the container with id f31ea0cc85a983b63f65f12825bfe40afe5f2767f866aa387017ad179f4ba6d2 Oct 01 15:04:52 crc kubenswrapper[4869]: W1001 15:04:52.098551 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-1c96c996c023e5a25144d771cb1db648a95750c3b50d6bd62e1bf5dd4c0a82b2 WatchSource:0}: Error finding container 1c96c996c023e5a25144d771cb1db648a95750c3b50d6bd62e1bf5dd4c0a82b2: Status 404 returned error can't find the container with id 1c96c996c023e5a25144d771cb1db648a95750c3b50d6bd62e1bf5dd4c0a82b2 Oct 01 15:04:52 crc kubenswrapper[4869]: W1001 15:04:52.103539 4869 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-ec5c8ef5b3536699eea2caeff9b68dbce4d2ba2caee8a5e34d23d0707b6954d4 WatchSource:0}: Error finding container ec5c8ef5b3536699eea2caeff9b68dbce4d2ba2caee8a5e34d23d0707b6954d4: Status 404 returned error can't find the container with id ec5c8ef5b3536699eea2caeff9b68dbce4d2ba2caee8a5e34d23d0707b6954d4 Oct 01 15:04:52 crc kubenswrapper[4869]: I1001 15:04:52.363603 4869 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 15:04:52 crc kubenswrapper[4869]: I1001 15:04:52.364862 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 15:04:52 crc kubenswrapper[4869]: I1001 15:04:52.364906 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 15:04:52 crc kubenswrapper[4869]: I1001 15:04:52.364914 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 15:04:52 crc kubenswrapper[4869]: I1001 15:04:52.364944 4869 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 01 15:04:52 crc kubenswrapper[4869]: E1001 15:04:52.365383 4869 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.30:6443: connect: connection refused" node="crc" Oct 01 15:04:52 crc kubenswrapper[4869]: W1001 15:04:52.426382 4869 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.30:6443: connect: connection refused Oct 01 15:04:52 crc kubenswrapper[4869]: E1001 15:04:52.426470 4869 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.30:6443: connect: connection refused" logger="UnhandledError" Oct 01 15:04:52 crc kubenswrapper[4869]: I1001 15:04:52.486774 4869 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.30:6443: connect: connection refused Oct 01 15:04:52 crc kubenswrapper[4869]: E1001 15:04:52.536086 4869 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.30:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.186a664e6e2fb4e2 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-10-01 15:04:51.483702498 +0000 UTC m=+0.630545654,LastTimestamp:2025-10-01 15:04:51.483702498 +0000 UTC m=+0.630545654,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Oct 01 15:04:52 crc kubenswrapper[4869]: I1001 15:04:52.586197 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" 
event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"f31ea0cc85a983b63f65f12825bfe40afe5f2767f866aa387017ad179f4ba6d2"} Oct 01 15:04:52 crc kubenswrapper[4869]: I1001 15:04:52.588774 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"758f2f35480c7c9253f596b2c0e7761ae9c09672641585bb28852bff2122facd"} Oct 01 15:04:52 crc kubenswrapper[4869]: I1001 15:04:52.590185 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"9d7a48b227d4483899725698f80930fd8aced4a908b5e231b4266f2d46acb0ac"} Oct 01 15:04:52 crc kubenswrapper[4869]: I1001 15:04:52.591625 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"ec5c8ef5b3536699eea2caeff9b68dbce4d2ba2caee8a5e34d23d0707b6954d4"} Oct 01 15:04:52 crc kubenswrapper[4869]: I1001 15:04:52.593315 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"1c96c996c023e5a25144d771cb1db648a95750c3b50d6bd62e1bf5dd4c0a82b2"} Oct 01 15:04:52 crc kubenswrapper[4869]: E1001 15:04:52.894315 4869 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.30:6443: connect: connection refused" interval="1.6s" Oct 01 15:04:52 crc kubenswrapper[4869]: W1001 15:04:52.925140 4869 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.30:6443: connect: connection refused Oct 01 15:04:52 crc kubenswrapper[4869]: E1001 15:04:52.925249 4869 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.30:6443: connect: connection refused" logger="UnhandledError" Oct 01 15:04:52 crc kubenswrapper[4869]: W1001 15:04:52.980894 4869 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.30:6443: connect: connection refused Oct 01 15:04:52 crc kubenswrapper[4869]: E1001 15:04:52.981019 4869 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.30:6443: connect: connection refused" logger="UnhandledError" Oct 01 15:04:53 crc kubenswrapper[4869]: W1001 15:04:53.041929 4869 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.30:6443: 
connect: connection refused Oct 01 15:04:53 crc kubenswrapper[4869]: E1001 15:04:53.042050 4869 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.30:6443: connect: connection refused" logger="UnhandledError" Oct 01 15:04:53 crc kubenswrapper[4869]: I1001 15:04:53.165731 4869 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 15:04:53 crc kubenswrapper[4869]: I1001 15:04:53.167112 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 15:04:53 crc kubenswrapper[4869]: I1001 15:04:53.167164 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 15:04:53 crc kubenswrapper[4869]: I1001 15:04:53.167180 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 15:04:53 crc kubenswrapper[4869]: I1001 15:04:53.167211 4869 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 01 15:04:53 crc kubenswrapper[4869]: E1001 15:04:53.167698 4869 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.30:6443: connect: connection refused" node="crc" Oct 01 15:04:53 crc kubenswrapper[4869]: I1001 15:04:53.486465 4869 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.30:6443: connect: connection refused Oct 01 15:04:53 crc kubenswrapper[4869]: I1001 15:04:53.599203 4869 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="1119a5821d140830d19c6272faade87f2ef99f2436d1011adad88b5b86d964b0" exitCode=0 Oct 01 15:04:53 crc kubenswrapper[4869]: I1001 15:04:53.599368 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"1119a5821d140830d19c6272faade87f2ef99f2436d1011adad88b5b86d964b0"} Oct 01 15:04:53 crc kubenswrapper[4869]: I1001 15:04:53.599462 4869 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 15:04:53 crc kubenswrapper[4869]: I1001 15:04:53.600440 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 15:04:53 crc kubenswrapper[4869]: I1001 15:04:53.600470 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 15:04:53 crc kubenswrapper[4869]: I1001 15:04:53.600483 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 15:04:53 crc kubenswrapper[4869]: I1001 15:04:53.601495 4869 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="a3780feb019844a75aef65177dcce97776f296b691a3834bb1989d3167b2b93a" exitCode=0 Oct 01 15:04:53 crc kubenswrapper[4869]: I1001 15:04:53.601578 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" 
event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"a3780feb019844a75aef65177dcce97776f296b691a3834bb1989d3167b2b93a"} Oct 01 15:04:53 crc kubenswrapper[4869]: I1001 15:04:53.601622 4869 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 15:04:53 crc kubenswrapper[4869]: I1001 15:04:53.602326 4869 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 15:04:53 crc kubenswrapper[4869]: I1001 15:04:53.602626 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 15:04:53 crc kubenswrapper[4869]: I1001 15:04:53.602650 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 15:04:53 crc kubenswrapper[4869]: I1001 15:04:53.602659 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 15:04:53 crc kubenswrapper[4869]: I1001 15:04:53.603397 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 15:04:53 crc kubenswrapper[4869]: I1001 15:04:53.603412 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 15:04:53 crc kubenswrapper[4869]: I1001 15:04:53.603419 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 15:04:53 crc kubenswrapper[4869]: I1001 15:04:53.604920 4869 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="bc3127de5c2f8d2cebadf974629298793c15c8203efe52ecaaaf485dd800b1e6" exitCode=0 Oct 01 15:04:53 crc kubenswrapper[4869]: I1001 15:04:53.604955 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"bc3127de5c2f8d2cebadf974629298793c15c8203efe52ecaaaf485dd800b1e6"} Oct 01 15:04:53 crc kubenswrapper[4869]: I1001 15:04:53.605006 4869 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 15:04:53 crc kubenswrapper[4869]: I1001 15:04:53.605660 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 15:04:53 crc kubenswrapper[4869]: I1001 15:04:53.605678 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 15:04:53 crc kubenswrapper[4869]: I1001 15:04:53.605687 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 15:04:53 crc kubenswrapper[4869]: I1001 15:04:53.607649 4869 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="9fcead58cd9a93faf29c7f5e7a6147a331ba4c1be9b73a212a277650dfc3ce89" exitCode=0 Oct 01 15:04:53 crc kubenswrapper[4869]: I1001 15:04:53.607868 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"9fcead58cd9a93faf29c7f5e7a6147a331ba4c1be9b73a212a277650dfc3ce89"} Oct 01 15:04:53 crc kubenswrapper[4869]: I1001 15:04:53.608061 4869 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 15:04:53 crc kubenswrapper[4869]: I1001 
15:04:53.608786 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 15:04:53 crc kubenswrapper[4869]: I1001 15:04:53.608811 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 15:04:53 crc kubenswrapper[4869]: I1001 15:04:53.608821 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 15:04:53 crc kubenswrapper[4869]: I1001 15:04:53.612686 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"d08b3c48bda55cb82da1c929cd1dff850852c297b3e8612df25bd27b587c2f42"} Oct 01 15:04:53 crc kubenswrapper[4869]: I1001 15:04:53.612712 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"cd32818a8ac16855cf81d75085471907d2579ba8bc8ab434464787d21e630c86"} Oct 01 15:04:53 crc kubenswrapper[4869]: I1001 15:04:53.612722 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"4d0beef438954056c20003a1a29c1356f1fe50e73db01411b4cf082a2e9b2a7a"} Oct 01 15:04:53 crc kubenswrapper[4869]: I1001 15:04:53.612790 4869 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 15:04:53 crc kubenswrapper[4869]: I1001 15:04:53.614195 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 15:04:53 crc kubenswrapper[4869]: I1001 15:04:53.614217 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 15:04:53 crc kubenswrapper[4869]: I1001 15:04:53.614226 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 15:04:54 crc kubenswrapper[4869]: I1001 15:04:54.486542 4869 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.30:6443: connect: connection refused Oct 01 15:04:54 crc kubenswrapper[4869]: E1001 15:04:54.494709 4869 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.30:6443: connect: connection refused" interval="3.2s" Oct 01 15:04:54 crc kubenswrapper[4869]: I1001 15:04:54.620525 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"5cddd04fc8db0c24ce9cefef134d143e9927ec632a6829a402069e740b42a429"} Oct 01 15:04:54 crc kubenswrapper[4869]: I1001 15:04:54.620594 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"c5fc6afde533fd1160a9b445d9835126d2c2b0fb019b4b9d13269e91c5ad1e53"} Oct 01 15:04:54 crc kubenswrapper[4869]: I1001 15:04:54.620612 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"ec14a0b8c122b3f70309112b68f07cdd77057e977ceb5f5a061a1892a93e28d6"} Oct 01 15:04:54 crc kubenswrapper[4869]: I1001 15:04:54.620627 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"5ccef45b15da9c0dc92cfbebfda5fddde05397f6a23946a1c7e9df12b3c275bb"} Oct 01 15:04:54 crc kubenswrapper[4869]: I1001 15:04:54.622329 4869 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="98e37f5fb1b61ea07e3f0adef080a5112a9f4a10a2d0b48aa20cdc9fd439d1c3" exitCode=0 Oct 01 15:04:54 crc kubenswrapper[4869]: I1001 15:04:54.622418 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"98e37f5fb1b61ea07e3f0adef080a5112a9f4a10a2d0b48aa20cdc9fd439d1c3"} Oct 01 15:04:54 crc kubenswrapper[4869]: I1001 15:04:54.622475 4869 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 15:04:54 crc kubenswrapper[4869]: I1001 15:04:54.629898 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 15:04:54 crc kubenswrapper[4869]: I1001 15:04:54.629950 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 15:04:54 crc kubenswrapper[4869]: I1001 15:04:54.629966 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 15:04:54 crc kubenswrapper[4869]: I1001 15:04:54.637109 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"777fd0bf2221fffd6a82f4a5b11d05e0ac9c4a5bbe11c02d5df9a4d142c9e02e"} Oct 01 15:04:54 crc kubenswrapper[4869]: I1001 15:04:54.637152 4869 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 15:04:54 crc kubenswrapper[4869]: I1001 15:04:54.638222 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 15:04:54 crc kubenswrapper[4869]: I1001 15:04:54.638291 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 15:04:54 crc kubenswrapper[4869]: I1001 15:04:54.638310 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 15:04:54 crc kubenswrapper[4869]: I1001 15:04:54.641076 4869 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 15:04:54 crc kubenswrapper[4869]: I1001 15:04:54.641068 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"ab024b2efbc4050055475f0858f23aebb4f051fd32959c5a90754abb8a4a175d"} Oct 01 15:04:54 crc kubenswrapper[4869]: I1001 15:04:54.641176 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" 
event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"9fa70a482c96cd98e3219a807eb5fc58b92fcfb5254c638c028b1fc527b06373"} Oct 01 15:04:54 crc kubenswrapper[4869]: I1001 15:04:54.641204 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"17ad372e18e04389c90df54c3b4c5db1815a57a6a11457cce3f31e956c078f40"} Oct 01 15:04:54 crc kubenswrapper[4869]: I1001 15:04:54.642595 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 15:04:54 crc kubenswrapper[4869]: I1001 15:04:54.642622 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 15:04:54 crc kubenswrapper[4869]: I1001 15:04:54.642631 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 15:04:54 crc kubenswrapper[4869]: I1001 15:04:54.648702 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"d09f0ff6fbdc14dfc60551c4bb2db225cda38354193e9e791335d01d9eb8cece"} Oct 01 15:04:54 crc kubenswrapper[4869]: I1001 15:04:54.648825 4869 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 15:04:54 crc kubenswrapper[4869]: I1001 15:04:54.649909 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 15:04:54 crc kubenswrapper[4869]: I1001 15:04:54.649938 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 15:04:54 crc kubenswrapper[4869]: I1001 15:04:54.649948 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 15:04:54 crc kubenswrapper[4869]: I1001 15:04:54.768707 4869 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 15:04:54 crc kubenswrapper[4869]: I1001 15:04:54.770509 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 15:04:54 crc kubenswrapper[4869]: I1001 15:04:54.770551 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 15:04:54 crc kubenswrapper[4869]: I1001 15:04:54.770565 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 15:04:54 crc kubenswrapper[4869]: I1001 15:04:54.770589 4869 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 01 15:04:54 crc kubenswrapper[4869]: E1001 15:04:54.771767 4869 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.30:6443: connect: connection refused" node="crc" Oct 01 15:04:54 crc kubenswrapper[4869]: W1001 15:04:54.861967 4869 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.30:6443: connect: connection refused Oct 01 15:04:54 crc kubenswrapper[4869]: E1001 15:04:54.862074 4869 reflector.go:158] "Unhandled Error" 
err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.30:6443: connect: connection refused" logger="UnhandledError" Oct 01 15:04:55 crc kubenswrapper[4869]: I1001 15:04:55.656378 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"0de9f530ff8a0069f4718fb3b820fef8a17fe5ee04f641326d42503d5b39956a"} Oct 01 15:04:55 crc kubenswrapper[4869]: I1001 15:04:55.656501 4869 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 15:04:55 crc kubenswrapper[4869]: I1001 15:04:55.658296 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 15:04:55 crc kubenswrapper[4869]: I1001 15:04:55.658344 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 15:04:55 crc kubenswrapper[4869]: I1001 15:04:55.658363 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 15:04:55 crc kubenswrapper[4869]: I1001 15:04:55.660902 4869 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="6b085e9b0d06b7feed4ef00cd1a9d416d27b12b5c0f6ea184e2b1060651b5599" exitCode=0 Oct 01 15:04:55 crc kubenswrapper[4869]: I1001 15:04:55.660997 4869 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 15:04:55 crc kubenswrapper[4869]: I1001 15:04:55.661062 4869 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 01 15:04:55 crc kubenswrapper[4869]: I1001 15:04:55.661092 4869 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 15:04:55 crc kubenswrapper[4869]: I1001 15:04:55.661118 4869 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 15:04:55 crc kubenswrapper[4869]: I1001 15:04:55.661091 4869 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 15:04:55 crc kubenswrapper[4869]: I1001 15:04:55.661154 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"6b085e9b0d06b7feed4ef00cd1a9d416d27b12b5c0f6ea184e2b1060651b5599"} Oct 01 15:04:55 crc kubenswrapper[4869]: I1001 15:04:55.662608 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 15:04:55 crc kubenswrapper[4869]: I1001 15:04:55.662662 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 15:04:55 crc kubenswrapper[4869]: I1001 15:04:55.662681 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 15:04:55 crc kubenswrapper[4869]: I1001 15:04:55.663253 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 15:04:55 crc kubenswrapper[4869]: I1001 15:04:55.663349 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 15:04:55 crc kubenswrapper[4869]: I1001 
15:04:55.663368 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 15:04:55 crc kubenswrapper[4869]: I1001 15:04:55.664685 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 15:04:55 crc kubenswrapper[4869]: I1001 15:04:55.664716 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 15:04:55 crc kubenswrapper[4869]: I1001 15:04:55.664725 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 15:04:55 crc kubenswrapper[4869]: I1001 15:04:55.664742 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 15:04:55 crc kubenswrapper[4869]: I1001 15:04:55.664789 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 15:04:55 crc kubenswrapper[4869]: I1001 15:04:55.664806 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 15:04:56 crc kubenswrapper[4869]: I1001 15:04:56.670541 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"37e3096ae19119bddfcba92c64781768052612b2866030f36d4c7e1d9d08b18c"} Oct 01 15:04:56 crc kubenswrapper[4869]: I1001 15:04:56.670623 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"4c8175743c170287d781790fe2a68f3b89b7a283825b957db31d9f7b2c5b3c9d"} Oct 01 15:04:56 crc kubenswrapper[4869]: I1001 15:04:56.670646 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"5a520e8e800de3b7b1800d14ca00163af5a48a79e7e473a32024cea57d107778"} Oct 01 15:04:56 crc kubenswrapper[4869]: I1001 15:04:56.670770 4869 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 15:04:56 crc kubenswrapper[4869]: I1001 15:04:56.670830 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 01 15:04:56 crc kubenswrapper[4869]: I1001 15:04:56.672418 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 15:04:56 crc kubenswrapper[4869]: I1001 15:04:56.672472 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 15:04:56 crc kubenswrapper[4869]: I1001 15:04:56.672490 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 15:04:57 crc kubenswrapper[4869]: I1001 15:04:57.057990 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 01 15:04:57 crc kubenswrapper[4869]: I1001 15:04:57.058217 4869 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 15:04:57 crc kubenswrapper[4869]: I1001 15:04:57.059445 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 15:04:57 crc kubenswrapper[4869]: I1001 15:04:57.059477 4869 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 15:04:57 crc kubenswrapper[4869]: I1001 15:04:57.059487 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 15:04:57 crc kubenswrapper[4869]: I1001 15:04:57.679562 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"f8adfce5677840a8db2bdc2854e4ed7aad502f11356d13e6263d6eefe0392eea"} Oct 01 15:04:57 crc kubenswrapper[4869]: I1001 15:04:57.679650 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"6f01b311978d352d5e032adea76ff88ddee759eee0d3085cb5a84832587968de"} Oct 01 15:04:57 crc kubenswrapper[4869]: I1001 15:04:57.679685 4869 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 15:04:57 crc kubenswrapper[4869]: I1001 15:04:57.679940 4869 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 15:04:57 crc kubenswrapper[4869]: I1001 15:04:57.681327 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 15:04:57 crc kubenswrapper[4869]: I1001 15:04:57.681384 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 15:04:57 crc kubenswrapper[4869]: I1001 15:04:57.681404 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 15:04:57 crc kubenswrapper[4869]: I1001 15:04:57.681461 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 15:04:57 crc kubenswrapper[4869]: I1001 15:04:57.681497 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 15:04:57 crc kubenswrapper[4869]: I1001 15:04:57.681514 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 15:04:57 crc kubenswrapper[4869]: I1001 15:04:57.715531 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 01 15:04:57 crc kubenswrapper[4869]: I1001 15:04:57.972784 4869 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 15:04:57 crc kubenswrapper[4869]: I1001 15:04:57.974226 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 15:04:57 crc kubenswrapper[4869]: I1001 15:04:57.974326 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 15:04:57 crc kubenswrapper[4869]: I1001 15:04:57.974346 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 15:04:57 crc kubenswrapper[4869]: I1001 15:04:57.974384 4869 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 01 15:04:58 crc kubenswrapper[4869]: I1001 15:04:58.682546 4869 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 15:04:58 crc kubenswrapper[4869]: I1001 15:04:58.682664 4869 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 15:04:58 crc kubenswrapper[4869]: 
I1001 15:04:58.684016 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 15:04:58 crc kubenswrapper[4869]: I1001 15:04:58.684067 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 15:04:58 crc kubenswrapper[4869]: I1001 15:04:58.684123 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 15:04:58 crc kubenswrapper[4869]: I1001 15:04:58.684172 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 15:04:58 crc kubenswrapper[4869]: I1001 15:04:58.684206 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 15:04:58 crc kubenswrapper[4869]: I1001 15:04:58.684223 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 15:04:58 crc kubenswrapper[4869]: I1001 15:04:58.753662 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc" Oct 01 15:04:59 crc kubenswrapper[4869]: I1001 15:04:59.076704 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 01 15:04:59 crc kubenswrapper[4869]: I1001 15:04:59.172681 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 01 15:04:59 crc kubenswrapper[4869]: I1001 15:04:59.173029 4869 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 15:04:59 crc kubenswrapper[4869]: I1001 15:04:59.174700 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 15:04:59 crc kubenswrapper[4869]: I1001 15:04:59.174752 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 15:04:59 crc kubenswrapper[4869]: I1001 15:04:59.174770 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 15:04:59 crc kubenswrapper[4869]: I1001 15:04:59.181810 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 01 15:04:59 crc kubenswrapper[4869]: I1001 15:04:59.685896 4869 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 15:04:59 crc kubenswrapper[4869]: I1001 15:04:59.686336 4869 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 15:04:59 crc kubenswrapper[4869]: I1001 15:04:59.687016 4869 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 15:04:59 crc kubenswrapper[4869]: I1001 15:04:59.687661 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 15:04:59 crc kubenswrapper[4869]: I1001 15:04:59.687712 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 15:04:59 crc kubenswrapper[4869]: I1001 15:04:59.687731 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 15:04:59 crc kubenswrapper[4869]: I1001 15:04:59.687887 4869 kubelet_node_status.go:724] "Recording 
event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 15:04:59 crc kubenswrapper[4869]: I1001 15:04:59.687943 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 15:04:59 crc kubenswrapper[4869]: I1001 15:04:59.687966 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 15:04:59 crc kubenswrapper[4869]: I1001 15:04:59.688320 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 15:04:59 crc kubenswrapper[4869]: I1001 15:04:59.688359 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 15:04:59 crc kubenswrapper[4869]: I1001 15:04:59.688380 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 15:05:01 crc kubenswrapper[4869]: I1001 15:05:01.373378 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 01 15:05:01 crc kubenswrapper[4869]: I1001 15:05:01.373600 4869 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 15:05:01 crc kubenswrapper[4869]: I1001 15:05:01.375219 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 15:05:01 crc kubenswrapper[4869]: I1001 15:05:01.375349 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 15:05:01 crc kubenswrapper[4869]: I1001 15:05:01.375367 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 15:05:01 crc kubenswrapper[4869]: E1001 15:05:01.668855 4869 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Oct 01 15:05:01 crc kubenswrapper[4869]: I1001 15:05:01.905796 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 01 15:05:01 crc kubenswrapper[4869]: I1001 15:05:01.905936 4869 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 15:05:01 crc kubenswrapper[4869]: I1001 15:05:01.907975 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 15:05:01 crc kubenswrapper[4869]: I1001 15:05:01.908035 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 15:05:01 crc kubenswrapper[4869]: I1001 15:05:01.908052 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 15:05:01 crc kubenswrapper[4869]: I1001 15:05:01.913967 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 01 15:05:02 crc kubenswrapper[4869]: I1001 15:05:02.182183 4869 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body= Oct 01 15:05:02 crc kubenswrapper[4869]: I1001 15:05:02.182380 4869 
prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 01 15:05:02 crc kubenswrapper[4869]: I1001 15:05:02.333677 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc" Oct 01 15:05:02 crc kubenswrapper[4869]: I1001 15:05:02.334121 4869 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 15:05:02 crc kubenswrapper[4869]: I1001 15:05:02.336084 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 15:05:02 crc kubenswrapper[4869]: I1001 15:05:02.336148 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 15:05:02 crc kubenswrapper[4869]: I1001 15:05:02.336168 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 15:05:02 crc kubenswrapper[4869]: I1001 15:05:02.700089 4869 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 15:05:02 crc kubenswrapper[4869]: I1001 15:05:02.705307 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 15:05:02 crc kubenswrapper[4869]: I1001 15:05:02.705383 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 15:05:02 crc kubenswrapper[4869]: I1001 15:05:02.705403 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 15:05:02 crc kubenswrapper[4869]: I1001 15:05:02.707241 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 01 15:05:03 crc kubenswrapper[4869]: I1001 15:05:03.702635 4869 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 15:05:03 crc kubenswrapper[4869]: I1001 15:05:03.703867 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 15:05:03 crc kubenswrapper[4869]: I1001 15:05:03.703914 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 15:05:03 crc kubenswrapper[4869]: I1001 15:05:03.703927 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 15:05:04 crc kubenswrapper[4869]: I1001 15:05:04.903320 4869 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Oct 01 15:05:04 crc kubenswrapper[4869]: I1001 15:05:04.903408 4869 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Oct 01 15:05:05 crc 
kubenswrapper[4869]: W1001 15:05:05.094515 4869 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": net/http: TLS handshake timeout
Oct 01 15:05:05 crc kubenswrapper[4869]: I1001 15:05:05.094631 4869 trace.go:236] Trace[756909647]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (01-Oct-2025 15:04:55.093) (total time: 10001ms):
Oct 01 15:05:05 crc kubenswrapper[4869]: Trace[756909647]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (15:05:05.094)
Oct 01 15:05:05 crc kubenswrapper[4869]: Trace[756909647]: [10.001250507s] [10.001250507s] END
Oct 01 15:05:05 crc kubenswrapper[4869]: E1001 15:05:05.094663 4869 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError"
Oct 01 15:05:05 crc kubenswrapper[4869]: W1001 15:05:05.478485 4869 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": net/http: TLS handshake timeout
Oct 01 15:05:05 crc kubenswrapper[4869]: I1001 15:05:05.478626 4869 trace.go:236] Trace[1951531480]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (01-Oct-2025 15:04:55.477) (total time: 10001ms):
Oct 01 15:05:05 crc kubenswrapper[4869]: Trace[1951531480]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (15:05:05.478)
Oct 01 15:05:05 crc kubenswrapper[4869]: Trace[1951531480]: [10.001301659s] [10.001301659s] END
Oct 01 15:05:05 crc kubenswrapper[4869]: E1001 15:05:05.478658 4869 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError"
Oct 01 15:05:05 crc kubenswrapper[4869]: I1001 15:05:05.487249 4869 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": net/http: TLS handshake timeout
Oct 01 15:05:05 crc kubenswrapper[4869]: W1001 15:05:05.570436 4869 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": net/http: TLS handshake timeout
Oct 01 15:05:05 crc kubenswrapper[4869]: I1001 15:05:05.570575 4869 trace.go:236] Trace[474438054]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (01-Oct-2025 15:04:55.567) (total time: 10003ms):
Oct 01 15:05:05 crc kubenswrapper[4869]: Trace[474438054]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": net/http: TLS handshake timeout 10003ms (15:05:05.570)
Oct 01 15:05:05 crc kubenswrapper[4869]: Trace[474438054]: [10.003244571s] [10.003244571s] END
Oct 01 15:05:05 crc kubenswrapper[4869]: E1001 15:05:05.570613 4869 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError"
Oct 01 15:05:05 crc kubenswrapper[4869]: I1001 15:05:05.762987 4869 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403}
Oct 01 15:05:05 crc kubenswrapper[4869]: I1001 15:05:05.763064 4869 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403"
Oct 01 15:05:05 crc kubenswrapper[4869]: I1001 15:05:05.780519 4869 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\": RBAC: [clusterrole.rbac.authorization.k8s.io \"system:public-info-viewer\" not found, clusterrole.rbac.authorization.k8s.io \"system:openshift:public-info-viewer\" not found]","reason":"Forbidden","details":{},"code":403}
Oct 01 15:05:05 crc kubenswrapper[4869]: I1001 15:05:05.780587 4869 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403"
Oct 01 15:05:08 crc kubenswrapper[4869]: I1001 15:05:08.798104 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc"
Oct 01 15:05:08 crc kubenswrapper[4869]: I1001 15:05:08.798976 4869 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 01 15:05:08 crc kubenswrapper[4869]: I1001 15:05:08.801178 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 15:05:08 crc kubenswrapper[4869]: I1001 15:05:08.801309 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 15:05:08 crc kubenswrapper[4869]: I1001 15:05:08.801329 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 15:05:08 crc kubenswrapper[4869]: I1001 15:05:08.823517 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc"
Oct 01 15:05:08 crc kubenswrapper[4869]: I1001 15:05:08.883481 4869 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160
Oct 01 15:05:09 crc kubenswrapper[4869]: I1001 15:05:09.084643 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc"
Oct 01 15:05:09 crc kubenswrapper[4869]: I1001 15:05:09.084881 4869 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 01 15:05:09 crc kubenswrapper[4869]: I1001 15:05:09.086565 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 15:05:09 crc kubenswrapper[4869]: I1001 15:05:09.086683 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 15:05:09 crc kubenswrapper[4869]: I1001 15:05:09.086711 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 15:05:09 crc kubenswrapper[4869]: I1001 15:05:09.092453 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc"
Oct 01 15:05:09 crc kubenswrapper[4869]: I1001 15:05:09.718713 4869 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 01 15:05:09 crc kubenswrapper[4869]: I1001 15:05:09.718721 4869 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 01 15:05:09 crc kubenswrapper[4869]: I1001 15:05:09.720030 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 15:05:09 crc kubenswrapper[4869]: I1001 15:05:09.720091 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 15:05:09 crc kubenswrapper[4869]: I1001 15:05:09.720103 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 15:05:09 crc kubenswrapper[4869]: I1001 15:05:09.720450 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 15:05:09 crc kubenswrapper[4869]: I1001 15:05:09.720526 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 15:05:09 crc kubenswrapper[4869]: I1001 15:05:09.720549 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 15:05:09 crc kubenswrapper[4869]: I1001 15:05:09.770792 4869 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160
Oct 01 15:05:10 crc kubenswrapper[4869]: I1001 15:05:10.280411 4869 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160
Oct 01 15:05:10 crc kubenswrapper[4869]: E1001 15:05:10.771822 4869 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": context deadline exceeded" interval="6.4s"
Oct 01 15:05:10 crc kubenswrapper[4869]: I1001 15:05:10.774534 4869 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160
Oct 01 15:05:10 crc kubenswrapper[4869]: E1001 15:05:10.777447 4869 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc"
Oct 01 15:05:10 crc kubenswrapper[4869]: I1001 15:05:10.778520 4869 reconstruct.go:205] "DevicePaths of reconstructed volumes updated"
Oct 01 15:05:10 crc kubenswrapper[4869]: I1001 15:05:10.850448 4869 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Liveness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:56296->192.168.126.11:17697: read: connection reset by peer" start-of-body=
Oct 01 15:05:10 crc kubenswrapper[4869]: I1001 15:05:10.850544 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:56296->192.168.126.11:17697: read: connection reset by peer"
Oct 01 15:05:10 crc kubenswrapper[4869]: I1001 15:05:10.850448 4869 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:43712->192.168.126.11:17697: read: connection reset by peer" start-of-body=
Oct 01 15:05:10 crc kubenswrapper[4869]: I1001 15:05:10.850622 4869 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:43712->192.168.126.11:17697: read: connection reset by peer"
Oct 01 15:05:10 crc kubenswrapper[4869]: I1001 15:05:10.851023 4869 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body=
Oct 01 15:05:10 crc kubenswrapper[4869]: I1001 15:05:10.851078 4869 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused"
Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.216839 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.221466 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.477847 4869 apiserver.go:52] "Watching apiserver"
Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.482024 4869 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66
Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.482312 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf","openshift-image-registry/node-ca-528sf","openshift-kube-controller-manager/kube-controller-manager-crc"]
Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.482600 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.482966 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.483020 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-528sf"
Oct 01 15:05:11 crc kubenswrapper[4869]: E1001 15:05:11.483084 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.483145 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h"
Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.483330 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.483379 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 01 15:05:11 crc kubenswrapper[4869]: E1001 15:05:11.483455 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.483512 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb"
Oct 01 15:05:11 crc kubenswrapper[4869]: E1001 15:05:11.483540 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.484697 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.485011 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.485159 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.485335 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.485567 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.485786 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.486273 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.486387 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.486489 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.486590 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.486763 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.486886 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.488704 4869 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.488977 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.512088 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.526503 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"51d3e7b3-811a-42d8-a711-abf28a181753\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:04:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:04:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:04:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd32818a8ac16855cf81d75085471907d2579ba8bc8ab434464787d21e630c86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T15:04:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d0beef438954056c20003a1a29c1356f1fe50e73db01411b4cf082a2e9b2a7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T15:04:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d08b3c48bda55cb82da1c929cd1dff850852c297b3e8612df25bd27b587c2f42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T15:04:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d09f0ff6fbdc14dfc60551c4bb2db225cda38354193e9e791335d01d9eb8cece\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T15:04:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T15:04:51Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.537367 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.549419 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.561557 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.571356 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.581703 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.582502 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.582541 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.582567 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.582590 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.582611 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.582636 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.582658 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: 
\"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.582680 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.582704 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.582724 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.582744 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.582765 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.582793 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.582814 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.582834 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.582886 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.582890 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod 
"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.582910 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.582935 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.582957 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.582978 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.583000 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.583025 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.583052 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.583080 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.583109 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.583136 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for 
volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.583157 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.583185 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.583191 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.583207 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.583227 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.583247 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.583250 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.583289 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.583312 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.583338 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.583362 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.583368 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.583390 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.583410 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.583430 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.583452 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.583473 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: 
\"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.583471 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.583482 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.583497 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.583510 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.583520 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.583551 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.583573 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.583619 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.583639 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod 
\"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.583659 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.583662 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.583693 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.583712 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.583729 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.583744 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.583760 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.583777 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.583796 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Oct 01 15:05:11 crc 
kubenswrapper[4869]: I1001 15:05:11.583814 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.583835 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.583859 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.583874 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.583889 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.583890 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.583892 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.583904 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.583951 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.583979 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.584005 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.584029 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.584055 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.584080 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.584106 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.584127 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.584153 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod 
\"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.584179 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.584200 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.584227 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.584250 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.584289 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.584319 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.584343 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.584365 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.584393 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.584416 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: 
\"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.584442 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.584468 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.584492 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.584518 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.584542 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.584566 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.584590 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.584688 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.584715 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.584738 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod 
\"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.584762 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.584796 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.584819 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.584843 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.584866 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.584906 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.584931 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.584955 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.584981 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.585007 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: 
\"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.585029 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.585051 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.585075 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.585098 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.585121 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.585145 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.585171 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.585198 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.585223 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.585246 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" 
(UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.585288 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.585312 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.585338 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.585365 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.585390 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.585414 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.585440 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.585462 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.585504 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.585530 4869 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.585554 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.585579 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.585603 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.585630 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.585655 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.585681 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.585708 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.585733 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.585758 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Oct 01 15:05:11 crc 
kubenswrapper[4869]: I1001 15:05:11.585782 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.585805 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.585831 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.585856 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.585879 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.585905 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.585943 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.585970 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.585994 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.586017 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: 
\"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.586081 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.586117 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.586141 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.583908 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.586166 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.584031 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.584041 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.584080 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.584147 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). 
InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.584269 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.584283 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.584292 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.584456 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.584609 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.584745 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.584854 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.584879 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.584935 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.586268 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.585095 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.585109 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.585183 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.585217 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.585306 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). 
InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.585476 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.585541 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.585683 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.585811 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.585885 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.586067 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.586129 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.586229 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.586365 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.587517 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.589838 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.590434 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.590505 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.590510 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.591046 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.591202 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.591308 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.591350 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.593188 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.594360 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.595287 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.595290 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.595475 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.595668 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.595813 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.595856 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.596494 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.596548 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.596604 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.596701 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). 
InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.596747 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.596945 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.596966 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.597146 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.597156 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: E1001 15:05:11.597184 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 15:05:12.097157695 +0000 UTC m=+21.244000921 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.597154 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.597529 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.597925 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.597989 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.598278 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.598616 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.598965 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.599206 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.599598 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). 
InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.599775 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.599794 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.599878 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.600205 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.600609 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.600684 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.600763 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.600773 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.600909 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.600922 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.601159 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.601600 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.601740 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.604181 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.604653 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.604945 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.604998 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.605320 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.605873 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.606317 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.606359 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.606481 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.606809 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.606989 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.607101 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.607394 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.607394 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.607798 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.608076 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.608100 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.595829 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.608381 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.608435 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.608557 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-528sf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8edc04b1-dbb4-4d18-a110-e925d19ac049\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hssdd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T15:05:11Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-528sf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 15:05:11 crc 
kubenswrapper[4869]: I1001 15:05:11.608602 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.586195 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.608847 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.608881 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.608906 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.608933 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.608958 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.608987 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.609009 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.609030 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod 
\"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.609053 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.609092 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.609117 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.609139 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.609161 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.609185 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.609189 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.609311 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.609210 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.609449 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.609505 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.609557 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.609584 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.609605 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.609655 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.609679 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.609725 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.609728 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: 
"1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.609749 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.609769 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.609819 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.609843 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.609898 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.609930 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.610011 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.610065 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.610093 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.610145 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.610167 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.610213 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.610238 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.610290 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.610315 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.610374 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.610389 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.610399 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.610445 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.610471 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.610495 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.610519 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-c86m8"] Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.610552 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.610564 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.610578 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.610655 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.610692 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.610723 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.610752 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.610781 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.610812 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.610859 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.610878 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.610934 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.610966 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/8edc04b1-dbb4-4d18-a110-e925d19ac049-serviceca\") pod \"node-ca-528sf\" (UID: \"8edc04b1-dbb4-4d18-a110-e925d19ac049\") " pod="openshift-image-registry/node-ca-528sf" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.610989 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.611011 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.611029 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.611048 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.611066 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.611086 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.611102 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: 
\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.611122 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.611168 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" Oct 01 15:05:11 crc kubenswrapper[4869]: E1001 15:05:11.611244 4869 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 01 15:05:11 crc kubenswrapper[4869]: E1001 15:05:11.611353 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-01 15:05:12.111330354 +0000 UTC m=+21.258173470 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.611652 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.611659 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.611175 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.611711 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.612108 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.612381 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.612435 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.612533 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/8edc04b1-dbb4-4d18-a110-e925d19ac049-host\") pod \"node-ca-528sf\" (UID: \"8edc04b1-dbb4-4d18-a110-e925d19ac049\") " pod="openshift-image-registry/node-ca-528sf" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.612592 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hssdd\" (UniqueName: \"kubernetes.io/projected/8edc04b1-dbb4-4d18-a110-e925d19ac049-kube-api-access-hssdd\") pod \"node-ca-528sf\" (UID: \"8edc04b1-dbb4-4d18-a110-e925d19ac049\") " pod="openshift-image-registry/node-ca-528sf" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.612638 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.612701 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.612756 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.612774 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.612811 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.612970 4869 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.613001 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.613028 4869 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.613055 4869 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.613386 4869 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.613428 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.613648 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.613677 4869 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.613706 4869 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: E1001 15:05:11.613723 4869 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.613733 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: 
\"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.613762 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: E1001 15:05:11.613791 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-01 15:05:12.113770759 +0000 UTC m=+21.260613885 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.613819 4869 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.613874 4869 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.613908 4869 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.613937 4869 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.613973 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.614107 4869 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.614135 4869 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.614161 4869 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.614184 4869 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: 
\"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.614207 4869 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.614228 4869 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.614252 4869 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.614315 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.614344 4869 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.614368 4869 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.614395 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.614422 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.614447 4869 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.614478 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.614503 4869 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.614527 4869 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.614552 4869 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: 
\"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.614584 4869 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.614610 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.614637 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.614663 4869 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.614689 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.614712 4869 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.614738 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.614887 4869 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.614922 4869 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.614947 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.614976 4869 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.615001 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.615123 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: 
\"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.615161 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.615198 4869 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.615223 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.615249 4869 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.615311 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.615337 4869 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.615362 4869 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.615387 4869 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.615420 4869 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.615446 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.615476 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.615500 4869 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.615525 4869 reconciler_common.go:293] "Volume 
detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.615549 4869 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.615572 4869 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.615596 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.615621 4869 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.615651 4869 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.615674 4869 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.615699 4869 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.615722 4869 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.615744 4869 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.615767 4869 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.615791 4869 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.615814 4869 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.615837 4869 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node 
\"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.615862 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.615887 4869 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.615909 4869 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.615921 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.615932 4869 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.615955 4869 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.615977 4869 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.616004 4869 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.616037 4869 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.616062 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.616087 4869 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.616111 4869 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.616135 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: 
\"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.616162 4869 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.616187 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.616213 4869 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.616238 4869 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.616297 4869 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.616324 4869 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.616349 4869 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.616374 4869 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.616402 4869 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.616415 4869 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. 
Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.616427 4869 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.616453 4869 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.616477 4869 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.616501 4869 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.616527 4869 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.616555 4869 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.616581 4869 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.616606 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.616632 4869 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.616659 4869 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.616684 4869 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.616711 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.616738 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: 
\"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.616763 4869 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.616789 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.616814 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.616841 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.616869 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.616907 4869 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.616932 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.616959 4869 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.616984 4869 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.617011 4869 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.617036 4869 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.617060 4869 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.617087 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: 
\"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.617111 4869 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.617135 4869 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.614553 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.617215 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.620367 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.620651 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: W1001 15:05:11.621393 4869 reflector.go:561] object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq": failed to list *v1.Secret: secrets "machine-config-daemon-dockercfg-r5tcq" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-machine-config-operator": no relationship found between node 'crc' and this object Oct 01 15:05:11 crc kubenswrapper[4869]: E1001 15:05:11.621428 4869 reflector.go:158] "Unhandled Error" err="object-\"openshift-machine-config-operator\"/\"machine-config-daemon-dockercfg-r5tcq\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"machine-config-daemon-dockercfg-r5tcq\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-machine-config-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError" Oct 01 15:05:11 crc kubenswrapper[4869]: W1001 15:05:11.621687 4869 reflector.go:561] object-"openshift-machine-config-operator"/"openshift-service-ca.crt": failed to list *v1.ConfigMap: configmaps "openshift-service-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-machine-config-operator": no relationship found between node 'crc' and this object Oct 01 15:05:11 crc kubenswrapper[4869]: E1001 15:05:11.621704 4869 reflector.go:158] "Unhandled Error" err="object-\"openshift-machine-config-operator\"/\"openshift-service-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"openshift-service-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-machine-config-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError" Oct 01 15:05:11 crc kubenswrapper[4869]: W1001 15:05:11.621766 4869 reflector.go:561] object-"openshift-machine-config-operator"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-machine-config-operator": no relationship found between node 'crc' and this object Oct 01 15:05:11 crc kubenswrapper[4869]: E1001 15:05:11.621779 4869 reflector.go:158] "Unhandled Error" err="object-\"openshift-machine-config-operator\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-machine-config-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError" Oct 01 15:05:11 crc kubenswrapper[4869]: W1001 15:05:11.621814 4869 reflector.go:561] object-"openshift-machine-config-operator"/"kube-rbac-proxy": failed to list *v1.ConfigMap: configmaps "kube-rbac-proxy" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-machine-config-operator": no relationship found between node 'crc' and this object Oct 01 15:05:11 crc kubenswrapper[4869]: E1001 15:05:11.621826 4869 reflector.go:158] "Unhandled Error" err="object-\"openshift-machine-config-operator\"/\"kube-rbac-proxy\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-rbac-proxy\" is 
forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-machine-config-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.621961 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.622088 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.622127 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.622452 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.622715 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.623026 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.623382 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.623399 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.623583 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.623811 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.623853 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.624135 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.624198 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.624212 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.625178 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.625996 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.632053 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.632503 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.632963 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.633212 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.633933 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.634553 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.635098 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.636644 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.637067 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.637174 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.637447 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.637740 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.637876 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.637943 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.638340 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.638698 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.638787 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.639758 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.639975 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.640413 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.641735 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). 
InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.641754 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.646827 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.650514 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.652955 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.652999 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.655629 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.658183 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.659332 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.659771 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.661360 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.661419 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.662422 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.662683 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.663150 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.663220 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.664755 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.666105 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.666235 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: E1001 15:05:11.669557 4869 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 01 15:05:11 crc kubenswrapper[4869]: E1001 15:05:11.669685 4869 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 01 15:05:11 crc kubenswrapper[4869]: E1001 15:05:11.669753 4869 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 01 15:05:11 crc kubenswrapper[4869]: E1001 15:05:11.669900 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-01 15:05:12.169865989 +0000 UTC m=+21.316709115 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.670219 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 15:05:11 crc kubenswrapper[4869]: E1001 15:05:11.670498 4869 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 01 15:05:11 crc kubenswrapper[4869]: E1001 15:05:11.670576 4869 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 01 15:05:11 crc kubenswrapper[4869]: E1001 15:05:11.670655 4869 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 01 15:05:11 crc kubenswrapper[4869]: E1001 15:05:11.670758 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-01 15:05:12.170747212 +0000 UTC m=+21.317590328 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.675452 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.675401 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.687596 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.713434 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.713501 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.713581 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.687894 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.714780 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.715202 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.715277 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.687925 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.716219 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.716918 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.720174 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.720350 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.720390 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.720425 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.720759 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.722208 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.730586 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.730768 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.730861 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.730885 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.730909 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.730927 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.730943 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.730963 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Oct 01 15:05:11 crc kubenswrapper[4869]: W1001 15:05:11.731068 4869 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes/kubernetes.io~projected/kube-api-access-x7zkh Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.731079 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.731111 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/8edc04b1-dbb4-4d18-a110-e925d19ac049-serviceca\") pod \"node-ca-528sf\" (UID: \"8edc04b1-dbb4-4d18-a110-e925d19ac049\") " pod="openshift-image-registry/node-ca-528sf" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.731136 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/a4b64f7f-0b03-4f47-965b-9fde048b735c-proxy-tls\") pod \"machine-config-daemon-c86m8\" (UID: \"a4b64f7f-0b03-4f47-965b-9fde048b735c\") " pod="openshift-machine-config-operator/machine-config-daemon-c86m8" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.731173 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/a4b64f7f-0b03-4f47-965b-9fde048b735c-mcd-auth-proxy-config\") pod \"machine-config-daemon-c86m8\" (UID: \"a4b64f7f-0b03-4f47-965b-9fde048b735c\") " pod="openshift-machine-config-operator/machine-config-daemon-c86m8" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.731190 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m289h\" (UniqueName: \"kubernetes.io/projected/a4b64f7f-0b03-4f47-965b-9fde048b735c-kube-api-access-m289h\") pod \"machine-config-daemon-c86m8\" (UID: \"a4b64f7f-0b03-4f47-965b-9fde048b735c\") " pod="openshift-machine-config-operator/machine-config-daemon-c86m8" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.731214 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/8edc04b1-dbb4-4d18-a110-e925d19ac049-host\") pod \"node-ca-528sf\" (UID: \"8edc04b1-dbb4-4d18-a110-e925d19ac049\") " pod="openshift-image-registry/node-ca-528sf" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.731230 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.731248 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hssdd\" (UniqueName: 
\"kubernetes.io/projected/8edc04b1-dbb4-4d18-a110-e925d19ac049-kube-api-access-hssdd\") pod \"node-ca-528sf\" (UID: \"8edc04b1-dbb4-4d18-a110-e925d19ac049\") " pod="openshift-image-registry/node-ca-528sf" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.731278 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.731294 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/a4b64f7f-0b03-4f47-965b-9fde048b735c-rootfs\") pod \"machine-config-daemon-c86m8\" (UID: \"a4b64f7f-0b03-4f47-965b-9fde048b735c\") " pod="openshift-machine-config-operator/machine-config-daemon-c86m8" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.731350 4869 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.731361 4869 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.731371 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.731381 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.731390 4869 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.731399 4869 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.731409 4869 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.731418 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.731427 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node 
\"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.731436 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.731443 4869 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.731451 4869 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.731460 4869 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.731478 4869 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.731488 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.731498 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.731507 4869 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.731515 4869 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.731524 4869 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.731532 4869 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.731541 4869 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.731552 4869 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: 
\"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.731561 4869 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.731569 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.731577 4869 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.731587 4869 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.731595 4869 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.731603 4869 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.731612 4869 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.731620 4869 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.731628 4869 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.731636 4869 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.731646 4869 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.731653 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.731661 4869 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" 
DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.731669 4869 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.731677 4869 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.731686 4869 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.731694 4869 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.731702 4869 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.731710 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.731718 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.731727 4869 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.731735 4869 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.731743 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.731751 4869 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.731758 4869 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.731766 4869 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: 
\"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.731774 4869 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.731782 4869 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.731789 4869 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.731798 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.731807 4869 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.731814 4869 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.731823 4869 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.731832 4869 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.731841 4869 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.731849 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.731858 4869 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.731866 4869 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.731876 4869 
reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.731885 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.731895 4869 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.731903 4869 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.731911 4869 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: W1001 15:05:11.731115 4869 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.731930 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/8edc04b1-dbb4-4d18-a110-e925d19ac049-serviceca\") pod \"node-ca-528sf\" (UID: \"8edc04b1-dbb4-4d18-a110-e925d19ac049\") " pod="openshift-image-registry/node-ca-528sf" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.731939 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: W1001 15:05:11.731147 4869 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes/kubernetes.io~secret/samples-operator-tls Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.731951 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: W1001 15:05:11.731988 4869 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~projected/kube-api-access-zkvpv Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.731996 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.732000 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/8edc04b1-dbb4-4d18-a110-e925d19ac049-host\") pod \"node-ca-528sf\" (UID: \"8edc04b1-dbb4-4d18-a110-e925d19ac049\") " pod="openshift-image-registry/node-ca-528sf" Oct 01 15:05:11 crc kubenswrapper[4869]: W1001 15:05:11.732036 4869 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes/kubernetes.io~configmap/ovnkube-script-lib Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.732038 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.732042 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: W1001 15:05:11.732081 4869 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.732087 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.732211 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.734965 4869 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="0de9f530ff8a0069f4718fb3b820fef8a17fe5ee04f641326d42503d5b39956a" exitCode=255 Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.735197 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"0de9f530ff8a0069f4718fb3b820fef8a17fe5ee04f641326d42503d5b39956a"} Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.749542 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: E1001 15:05:11.752542 4869 kubelet.go:1929] "Failed creating a mirror pod for" err="pods \"kube-controller-manager-crc\" already exists" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.754162 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.754289 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.756361 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.763117 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.772022 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hssdd\" (UniqueName: \"kubernetes.io/projected/8edc04b1-dbb4-4d18-a110-e925d19ac049-kube-api-access-hssdd\") pod \"node-ca-528sf\" (UID: \"8edc04b1-dbb4-4d18-a110-e925d19ac049\") " pod="openshift-image-registry/node-ca-528sf" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.776079 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.786897 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook 
approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.794197 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.798907 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"51d3e7b3-811a-42d8-a711-abf28a181753\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:04:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:04:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:04:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd32818a8ac16855cf81d75085471907d2579ba8bc8ab434464787d21e630c86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T15:04:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d0beef438954056c20003a1a29c1356f1fe50e73db01411b4cf082a2e9b2a7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T15:04:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d08b3c48bda55cb82da1c929cd1dff850852c297b3e8612df25bd27b587c2f42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T15:04:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resource
s\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d09f0ff6fbdc14dfc60551c4bb2db225cda38354193e9e791335d01d9eb8cece\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T15:04:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T15:04:51Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.799700 4869 scope.go:117] "RemoveContainer" containerID="0de9f530ff8a0069f4718fb3b820fef8a17fe5ee04f641326d42503d5b39956a" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.799745 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.800143 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-528sf" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.810695 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 01 15:05:11 crc kubenswrapper[4869]: W1001 15:05:11.820990 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod37a5e44f_9a88_4405_be8a_b645485e7312.slice/crio-9a294d127807682312a6c6828f844310a8a022ebce811e51a53c2a5e60fc6bec WatchSource:0}: Error finding container 9a294d127807682312a6c6828f844310a8a022ebce811e51a53c2a5e60fc6bec: Status 404 returned error can't find the container with id 9a294d127807682312a6c6828f844310a8a022ebce811e51a53c2a5e60fc6bec Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.821115 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-528sf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8edc04b1-dbb4-4d18-a110-e925d19ac049\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hssdd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T15:05:11Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-528sf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.821160 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.832705 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/a4b64f7f-0b03-4f47-965b-9fde048b735c-proxy-tls\") pod \"machine-config-daemon-c86m8\" (UID: \"a4b64f7f-0b03-4f47-965b-9fde048b735c\") " pod="openshift-machine-config-operator/machine-config-daemon-c86m8" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.832768 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/a4b64f7f-0b03-4f47-965b-9fde048b735c-mcd-auth-proxy-config\") pod \"machine-config-daemon-c86m8\" (UID: \"a4b64f7f-0b03-4f47-965b-9fde048b735c\") " pod="openshift-machine-config-operator/machine-config-daemon-c86m8" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.832800 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m289h\" (UniqueName: \"kubernetes.io/projected/a4b64f7f-0b03-4f47-965b-9fde048b735c-kube-api-access-m289h\") pod \"machine-config-daemon-c86m8\" (UID: \"a4b64f7f-0b03-4f47-965b-9fde048b735c\") " pod="openshift-machine-config-operator/machine-config-daemon-c86m8" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.832832 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/a4b64f7f-0b03-4f47-965b-9fde048b735c-rootfs\") pod \"machine-config-daemon-c86m8\" (UID: \"a4b64f7f-0b03-4f47-965b-9fde048b735c\") " pod="openshift-machine-config-operator/machine-config-daemon-c86m8" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.832868 4869 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.832882 4869 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.832894 4869 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.832905 4869 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.832916 4869 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.832928 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.832940 4869 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: 
\"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.832950 4869 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.832983 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/a4b64f7f-0b03-4f47-965b-9fde048b735c-rootfs\") pod \"machine-config-daemon-c86m8\" (UID: \"a4b64f7f-0b03-4f47-965b-9fde048b735c\") " pod="openshift-machine-config-operator/machine-config-daemon-c86m8" Oct 01 15:05:11 crc kubenswrapper[4869]: W1001 15:05:11.835631 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd75a4c96_2883_4a0b_bab2_0fab2b6c0b49.slice/crio-fc43848025e4ec8362b1ba825fb555028bd74973a27291635502bfbd093b1efc WatchSource:0}: Error finding container fc43848025e4ec8362b1ba825fb555028bd74973a27291635502bfbd093b1efc: Status 404 returned error can't find the container with id fc43848025e4ec8362b1ba825fb555028bd74973a27291635502bfbd093b1efc Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.836675 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.837966 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/a4b64f7f-0b03-4f47-965b-9fde048b735c-proxy-tls\") pod \"machine-config-daemon-c86m8\" (UID: \"a4b64f7f-0b03-4f47-965b-9fde048b735c\") " pod="openshift-machine-config-operator/machine-config-daemon-c86m8" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.846368 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4b64f7f-0b03-4f47-965b-9fde048b735c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m289h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m289h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T15:05:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-c86m8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.859793 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5794379e-0bd9-4b1f-89eb-d1075fd901e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:04:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:04:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:04:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:04:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:04:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ccef45b15da9c0dc92cfbebfda5fddde05397f6a23946a1c7e9df12b3c275bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T15:04:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c5fc6afde533fd1160a9b445d9835126d2c2b0fb019b4b9d13269e91c5ad1e53\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T15:04:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec14a0b8c122b3f70309112b68f07cdd77057e977ceb5f5a061a1892a93e28d6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T15:04:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0de9f530ff8a0069f4718fb3b820fef8a17fe5ee04f641326d42503d5b39956a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0de9f530ff8a0069f4718fb3b820fef8a17fe5ee04f641326d42503d5b39956a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-01
T15:05:10Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1001 15:05:05.118072 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1001 15:05:05.120651 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2773387964/tls.crt::/tmp/serving-cert-2773387964/tls.key\\\\\\\"\\\\nI1001 15:05:10.813323 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1001 15:05:10.819195 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1001 15:05:10.819374 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1001 15:05:10.819467 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1001 15:05:10.819569 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1001 15:05:10.834141 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1001 15:05:10.834193 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1001 15:05:10.834212 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1001 15:05:10.834221 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1001 15:05:10.834227 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1001 15:05:10.834234 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1001 15:05:10.834240 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1001 15:05:10.834323 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1001 15:05:10.839565 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T15:04:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cddd04fc8db0c24ce9cefef134d143e9927ec632a6829a402069e740b42a429\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T15:04:54Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1119a5821d140830d19c6272faade87f2ef99f2436d1011adad88b5b86d964b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1119a5821d140830d19c6272faade87f2ef99f2436d1011adad88b5b86d964b0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T15:04:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T15:04:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T15:04:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.873894 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-528sf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8edc04b1-dbb4-4d18-a110-e925d19ac049\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hssdd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T15:05:11Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-528sf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.886354 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.904865 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4b64f7f-0b03-4f47-965b-9fde048b735c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m289h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m289h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T15:05:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-c86m8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.919677 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.941329 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.954843 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.966021 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.976750 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 15:05:11 crc kubenswrapper[4869]: I1001 15:05:11.992653 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"51d3e7b3-811a-42d8-a711-abf28a181753\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:04:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:04:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:04:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd32818a8ac16855cf81d75085471907d2579ba8bc8ab434464787d21e630c86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T15:04:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d0beef438954056c20003a1a29c1356f1fe50e73db01411b4cf082a2e9b2a7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T15:04:52Z\\\"}},\\\"v
olumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d08b3c48bda55cb82da1c929cd1dff850852c297b3e8612df25bd27b587c2f42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T15:04:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d09f0ff6fbdc14dfc60551c4bb2db225cda38354193e9e791335d01d9eb8cece\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T15:04:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T15:04:51Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.018195 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-5hbq7"] Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.018459 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-j98s2"] Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.018635 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-j98s2" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.019286 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/node-resolver-5hbq7" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.022464 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.022855 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.027552 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.027470 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.027746 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.030689 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.031309 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.031657 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.036753 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/69635c7a-0025-4ea2-a1b6-fc7776c2be11-host-var-lib-kubelet\") pod \"multus-j98s2\" (UID: \"69635c7a-0025-4ea2-a1b6-fc7776c2be11\") " pod="openshift-multus/multus-j98s2" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.036820 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/69635c7a-0025-4ea2-a1b6-fc7776c2be11-cni-binary-copy\") pod \"multus-j98s2\" (UID: \"69635c7a-0025-4ea2-a1b6-fc7776c2be11\") " pod="openshift-multus/multus-j98s2" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.036840 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/69635c7a-0025-4ea2-a1b6-fc7776c2be11-os-release\") pod \"multus-j98s2\" (UID: \"69635c7a-0025-4ea2-a1b6-fc7776c2be11\") " pod="openshift-multus/multus-j98s2" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.036876 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/69635c7a-0025-4ea2-a1b6-fc7776c2be11-hostroot\") pod \"multus-j98s2\" (UID: \"69635c7a-0025-4ea2-a1b6-fc7776c2be11\") " pod="openshift-multus/multus-j98s2" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.036898 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/69635c7a-0025-4ea2-a1b6-fc7776c2be11-multus-cni-dir\") pod \"multus-j98s2\" (UID: \"69635c7a-0025-4ea2-a1b6-fc7776c2be11\") " pod="openshift-multus/multus-j98s2" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.036914 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" 
(UniqueName: \"kubernetes.io/host-path/69635c7a-0025-4ea2-a1b6-fc7776c2be11-system-cni-dir\") pod \"multus-j98s2\" (UID: \"69635c7a-0025-4ea2-a1b6-fc7776c2be11\") " pod="openshift-multus/multus-j98s2" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.036935 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/69635c7a-0025-4ea2-a1b6-fc7776c2be11-host-var-lib-cni-bin\") pod \"multus-j98s2\" (UID: \"69635c7a-0025-4ea2-a1b6-fc7776c2be11\") " pod="openshift-multus/multus-j98s2" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.036990 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b7rv5\" (UniqueName: \"kubernetes.io/projected/69635c7a-0025-4ea2-a1b6-fc7776c2be11-kube-api-access-b7rv5\") pod \"multus-j98s2\" (UID: \"69635c7a-0025-4ea2-a1b6-fc7776c2be11\") " pod="openshift-multus/multus-j98s2" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.037239 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-65476\" (UniqueName: \"kubernetes.io/projected/0c01e592-2d8d-4773-a8b3-f6efe676f57f-kube-api-access-65476\") pod \"node-resolver-5hbq7\" (UID: \"0c01e592-2d8d-4773-a8b3-f6efe676f57f\") " pod="openshift-dns/node-resolver-5hbq7" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.037368 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/69635c7a-0025-4ea2-a1b6-fc7776c2be11-cnibin\") pod \"multus-j98s2\" (UID: \"69635c7a-0025-4ea2-a1b6-fc7776c2be11\") " pod="openshift-multus/multus-j98s2" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.037399 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/69635c7a-0025-4ea2-a1b6-fc7776c2be11-multus-socket-dir-parent\") pod \"multus-j98s2\" (UID: \"69635c7a-0025-4ea2-a1b6-fc7776c2be11\") " pod="openshift-multus/multus-j98s2" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.037425 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/69635c7a-0025-4ea2-a1b6-fc7776c2be11-host-run-netns\") pod \"multus-j98s2\" (UID: \"69635c7a-0025-4ea2-a1b6-fc7776c2be11\") " pod="openshift-multus/multus-j98s2" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.037454 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/69635c7a-0025-4ea2-a1b6-fc7776c2be11-multus-daemon-config\") pod \"multus-j98s2\" (UID: \"69635c7a-0025-4ea2-a1b6-fc7776c2be11\") " pod="openshift-multus/multus-j98s2" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.037485 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/69635c7a-0025-4ea2-a1b6-fc7776c2be11-multus-conf-dir\") pod \"multus-j98s2\" (UID: \"69635c7a-0025-4ea2-a1b6-fc7776c2be11\") " pod="openshift-multus/multus-j98s2" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.037510 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: 
\"kubernetes.io/host-path/69635c7a-0025-4ea2-a1b6-fc7776c2be11-host-var-lib-cni-multus\") pod \"multus-j98s2\" (UID: \"69635c7a-0025-4ea2-a1b6-fc7776c2be11\") " pod="openshift-multus/multus-j98s2" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.037535 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/69635c7a-0025-4ea2-a1b6-fc7776c2be11-etc-kubernetes\") pod \"multus-j98s2\" (UID: \"69635c7a-0025-4ea2-a1b6-fc7776c2be11\") " pod="openshift-multus/multus-j98s2" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.037591 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/69635c7a-0025-4ea2-a1b6-fc7776c2be11-host-run-k8s-cni-cncf-io\") pod \"multus-j98s2\" (UID: \"69635c7a-0025-4ea2-a1b6-fc7776c2be11\") " pod="openshift-multus/multus-j98s2" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.037620 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/0c01e592-2d8d-4773-a8b3-f6efe676f57f-hosts-file\") pod \"node-resolver-5hbq7\" (UID: \"0c01e592-2d8d-4773-a8b3-f6efe676f57f\") " pod="openshift-dns/node-resolver-5hbq7" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.037652 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/69635c7a-0025-4ea2-a1b6-fc7776c2be11-host-run-multus-certs\") pod \"multus-j98s2\" (UID: \"69635c7a-0025-4ea2-a1b6-fc7776c2be11\") " pod="openshift-multus/multus-j98s2" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.045532 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.083664 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.109504 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.132795 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.138902 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.138978 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/69635c7a-0025-4ea2-a1b6-fc7776c2be11-multus-cni-dir\") pod \"multus-j98s2\" (UID: \"69635c7a-0025-4ea2-a1b6-fc7776c2be11\") " pod="openshift-multus/multus-j98s2" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.139010 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/69635c7a-0025-4ea2-a1b6-fc7776c2be11-system-cni-dir\") pod \"multus-j98s2\" (UID: \"69635c7a-0025-4ea2-a1b6-fc7776c2be11\") " pod="openshift-multus/multus-j98s2" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.139030 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/69635c7a-0025-4ea2-a1b6-fc7776c2be11-host-var-lib-cni-bin\") pod \"multus-j98s2\" (UID: \"69635c7a-0025-4ea2-a1b6-fc7776c2be11\") " pod="openshift-multus/multus-j98s2" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.139048 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b7rv5\" (UniqueName: \"kubernetes.io/projected/69635c7a-0025-4ea2-a1b6-fc7776c2be11-kube-api-access-b7rv5\") pod \"multus-j98s2\" (UID: \"69635c7a-0025-4ea2-a1b6-fc7776c2be11\") " pod="openshift-multus/multus-j98s2" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.139076 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-65476\" (UniqueName: \"kubernetes.io/projected/0c01e592-2d8d-4773-a8b3-f6efe676f57f-kube-api-access-65476\") pod \"node-resolver-5hbq7\" (UID: \"0c01e592-2d8d-4773-a8b3-f6efe676f57f\") " pod="openshift-dns/node-resolver-5hbq7" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.139114 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/69635c7a-0025-4ea2-a1b6-fc7776c2be11-cnibin\") pod \"multus-j98s2\" (UID: 
\"69635c7a-0025-4ea2-a1b6-fc7776c2be11\") " pod="openshift-multus/multus-j98s2" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.139136 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/69635c7a-0025-4ea2-a1b6-fc7776c2be11-multus-socket-dir-parent\") pod \"multus-j98s2\" (UID: \"69635c7a-0025-4ea2-a1b6-fc7776c2be11\") " pod="openshift-multus/multus-j98s2" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.139157 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/69635c7a-0025-4ea2-a1b6-fc7776c2be11-host-run-netns\") pod \"multus-j98s2\" (UID: \"69635c7a-0025-4ea2-a1b6-fc7776c2be11\") " pod="openshift-multus/multus-j98s2" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.139179 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/69635c7a-0025-4ea2-a1b6-fc7776c2be11-multus-daemon-config\") pod \"multus-j98s2\" (UID: \"69635c7a-0025-4ea2-a1b6-fc7776c2be11\") " pod="openshift-multus/multus-j98s2" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.139199 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/69635c7a-0025-4ea2-a1b6-fc7776c2be11-multus-conf-dir\") pod \"multus-j98s2\" (UID: \"69635c7a-0025-4ea2-a1b6-fc7776c2be11\") " pod="openshift-multus/multus-j98s2" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.139219 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/69635c7a-0025-4ea2-a1b6-fc7776c2be11-host-var-lib-cni-multus\") pod \"multus-j98s2\" (UID: \"69635c7a-0025-4ea2-a1b6-fc7776c2be11\") " pod="openshift-multus/multus-j98s2" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.139237 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/69635c7a-0025-4ea2-a1b6-fc7776c2be11-etc-kubernetes\") pod \"multus-j98s2\" (UID: \"69635c7a-0025-4ea2-a1b6-fc7776c2be11\") " pod="openshift-multus/multus-j98s2" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.139274 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/69635c7a-0025-4ea2-a1b6-fc7776c2be11-host-run-k8s-cni-cncf-io\") pod \"multus-j98s2\" (UID: \"69635c7a-0025-4ea2-a1b6-fc7776c2be11\") " pod="openshift-multus/multus-j98s2" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.139297 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/0c01e592-2d8d-4773-a8b3-f6efe676f57f-hosts-file\") pod \"node-resolver-5hbq7\" (UID: \"0c01e592-2d8d-4773-a8b3-f6efe676f57f\") " pod="openshift-dns/node-resolver-5hbq7" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.139317 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/69635c7a-0025-4ea2-a1b6-fc7776c2be11-host-run-multus-certs\") pod \"multus-j98s2\" (UID: \"69635c7a-0025-4ea2-a1b6-fc7776c2be11\") " pod="openshift-multus/multus-j98s2" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.139343 4869 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.139389 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/69635c7a-0025-4ea2-a1b6-fc7776c2be11-cni-binary-copy\") pod \"multus-j98s2\" (UID: \"69635c7a-0025-4ea2-a1b6-fc7776c2be11\") " pod="openshift-multus/multus-j98s2" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.139412 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/69635c7a-0025-4ea2-a1b6-fc7776c2be11-host-var-lib-kubelet\") pod \"multus-j98s2\" (UID: \"69635c7a-0025-4ea2-a1b6-fc7776c2be11\") " pod="openshift-multus/multus-j98s2" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.139434 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/69635c7a-0025-4ea2-a1b6-fc7776c2be11-os-release\") pod \"multus-j98s2\" (UID: \"69635c7a-0025-4ea2-a1b6-fc7776c2be11\") " pod="openshift-multus/multus-j98s2" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.139461 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.139483 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/69635c7a-0025-4ea2-a1b6-fc7776c2be11-hostroot\") pod \"multus-j98s2\" (UID: \"69635c7a-0025-4ea2-a1b6-fc7776c2be11\") " pod="openshift-multus/multus-j98s2" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.139550 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/69635c7a-0025-4ea2-a1b6-fc7776c2be11-hostroot\") pod \"multus-j98s2\" (UID: \"69635c7a-0025-4ea2-a1b6-fc7776c2be11\") " pod="openshift-multus/multus-j98s2" Oct 01 15:05:12 crc kubenswrapper[4869]: E1001 15:05:12.139631 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 15:05:13.139613037 +0000 UTC m=+22.286456153 (durationBeforeRetry 1s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.139684 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/69635c7a-0025-4ea2-a1b6-fc7776c2be11-multus-cni-dir\") pod \"multus-j98s2\" (UID: \"69635c7a-0025-4ea2-a1b6-fc7776c2be11\") " pod="openshift-multus/multus-j98s2" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.139802 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/69635c7a-0025-4ea2-a1b6-fc7776c2be11-system-cni-dir\") pod \"multus-j98s2\" (UID: \"69635c7a-0025-4ea2-a1b6-fc7776c2be11\") " pod="openshift-multus/multus-j98s2" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.139842 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/69635c7a-0025-4ea2-a1b6-fc7776c2be11-host-var-lib-cni-bin\") pod \"multus-j98s2\" (UID: \"69635c7a-0025-4ea2-a1b6-fc7776c2be11\") " pod="openshift-multus/multus-j98s2" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.140217 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/69635c7a-0025-4ea2-a1b6-fc7776c2be11-cnibin\") pod \"multus-j98s2\" (UID: \"69635c7a-0025-4ea2-a1b6-fc7776c2be11\") " pod="openshift-multus/multus-j98s2" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.140288 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/69635c7a-0025-4ea2-a1b6-fc7776c2be11-multus-socket-dir-parent\") pod \"multus-j98s2\" (UID: \"69635c7a-0025-4ea2-a1b6-fc7776c2be11\") " pod="openshift-multus/multus-j98s2" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.140330 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/69635c7a-0025-4ea2-a1b6-fc7776c2be11-host-run-netns\") pod \"multus-j98s2\" (UID: \"69635c7a-0025-4ea2-a1b6-fc7776c2be11\") " pod="openshift-multus/multus-j98s2" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.141024 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/69635c7a-0025-4ea2-a1b6-fc7776c2be11-multus-daemon-config\") pod \"multus-j98s2\" (UID: \"69635c7a-0025-4ea2-a1b6-fc7776c2be11\") " pod="openshift-multus/multus-j98s2" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.141076 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/69635c7a-0025-4ea2-a1b6-fc7776c2be11-multus-conf-dir\") pod \"multus-j98s2\" (UID: \"69635c7a-0025-4ea2-a1b6-fc7776c2be11\") " pod="openshift-multus/multus-j98s2" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.141109 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: 
\"kubernetes.io/host-path/69635c7a-0025-4ea2-a1b6-fc7776c2be11-host-var-lib-cni-multus\") pod \"multus-j98s2\" (UID: \"69635c7a-0025-4ea2-a1b6-fc7776c2be11\") " pod="openshift-multus/multus-j98s2" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.141138 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/69635c7a-0025-4ea2-a1b6-fc7776c2be11-etc-kubernetes\") pod \"multus-j98s2\" (UID: \"69635c7a-0025-4ea2-a1b6-fc7776c2be11\") " pod="openshift-multus/multus-j98s2" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.141165 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/69635c7a-0025-4ea2-a1b6-fc7776c2be11-host-run-k8s-cni-cncf-io\") pod \"multus-j98s2\" (UID: \"69635c7a-0025-4ea2-a1b6-fc7776c2be11\") " pod="openshift-multus/multus-j98s2" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.141202 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/0c01e592-2d8d-4773-a8b3-f6efe676f57f-hosts-file\") pod \"node-resolver-5hbq7\" (UID: \"0c01e592-2d8d-4773-a8b3-f6efe676f57f\") " pod="openshift-dns/node-resolver-5hbq7" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.141236 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/69635c7a-0025-4ea2-a1b6-fc7776c2be11-host-run-multus-certs\") pod \"multus-j98s2\" (UID: \"69635c7a-0025-4ea2-a1b6-fc7776c2be11\") " pod="openshift-multus/multus-j98s2" Oct 01 15:05:12 crc kubenswrapper[4869]: E1001 15:05:12.141301 4869 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 01 15:05:12 crc kubenswrapper[4869]: E1001 15:05:12.141342 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-01 15:05:13.141331963 +0000 UTC m=+22.288175079 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.142047 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/69635c7a-0025-4ea2-a1b6-fc7776c2be11-cni-binary-copy\") pod \"multus-j98s2\" (UID: \"69635c7a-0025-4ea2-a1b6-fc7776c2be11\") " pod="openshift-multus/multus-j98s2" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.142087 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/69635c7a-0025-4ea2-a1b6-fc7776c2be11-host-var-lib-kubelet\") pod \"multus-j98s2\" (UID: \"69635c7a-0025-4ea2-a1b6-fc7776c2be11\") " pod="openshift-multus/multus-j98s2" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.142419 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/69635c7a-0025-4ea2-a1b6-fc7776c2be11-os-release\") pod \"multus-j98s2\" (UID: \"69635c7a-0025-4ea2-a1b6-fc7776c2be11\") " pod="openshift-multus/multus-j98s2" Oct 01 15:05:12 crc kubenswrapper[4869]: E1001 15:05:12.142491 4869 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 01 15:05:12 crc kubenswrapper[4869]: E1001 15:05:12.142572 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-01 15:05:13.142553525 +0000 UTC m=+22.289396641 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.145545 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5794379e-0bd9-4b1f-89eb-d1075fd901e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:04:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:04:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:04:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:04:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:04:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ccef45b15da9c0dc92cfbebfda5fddde05397f6a23946a1c7e9df12b3c275bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T15:04:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c5fc6afde533fd1160a9b445d9835126d2c2b0fb019b4b9d13269e91c5ad1e53\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T15:04:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec14a0b8c122b3f70309112b68f07cdd77057e977ceb5f5a061a1892a93e28d6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f
36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T15:04:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0de9f530ff8a0069f4718fb3b820fef8a17fe5ee04f641326d42503d5b39956a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0de9f530ff8a0069f4718fb3b820fef8a17fe5ee04f641326d42503d5b39956a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-01T15:05:10Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1001 15:05:05.118072 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1001 15:05:05.120651 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2773387964/tls.crt::/tmp/serving-cert-2773387964/tls.key\\\\\\\"\\\\nI1001 15:05:10.813323 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1001 15:05:10.819195 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1001 15:05:10.819374 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1001 15:05:10.819467 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1001 15:05:10.819569 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1001 15:05:10.834141 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1001 15:05:10.834193 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1001 15:05:10.834212 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1001 15:05:10.834221 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1001 15:05:10.834227 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1001 15:05:10.834234 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1001 15:05:10.834240 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1001 15:05:10.834323 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1001 15:05:10.839565 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T15:04:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cddd04fc8db0c24ce9cefef134d143e9927ec632a6829a402069e740b42a429\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T15:04:54Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1119a5821d140830d19c6272faade87f2ef99f2436d1011adad88b5b86d964b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1119a5821d140830d19c6272faade87f2ef99f2436d1011adad88b5b86d964b0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T15:04:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T15:04:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T15:04:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.159223 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b7rv5\" (UniqueName: \"kubernetes.io/projected/69635c7a-0025-4ea2-a1b6-fc7776c2be11-kube-api-access-b7rv5\") pod \"multus-j98s2\" (UID: \"69635c7a-0025-4ea2-a1b6-fc7776c2be11\") " pod="openshift-multus/multus-j98s2" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.159434 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-65476\" (UniqueName: \"kubernetes.io/projected/0c01e592-2d8d-4773-a8b3-f6efe676f57f-kube-api-access-65476\") pod \"node-resolver-5hbq7\" (UID: \"0c01e592-2d8d-4773-a8b3-f6efe676f57f\") " pod="openshift-dns/node-resolver-5hbq7" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.165139 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4b64f7f-0b03-4f47-965b-9fde048b735c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m289h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m289h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T15:05:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-c86m8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.185992 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"51d3e7b3-811a-42d8-a711-abf28a181753\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:04:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:04:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:04:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd32818a8ac16855cf81d75085471907d2579ba8bc8ab434464787d21e630c86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T15:04:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d0beef438954056c20003a1a29c1356f1fe50e73db01411b4cf082a2e9b2a7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T15:04:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d08b3c48bda55cb82da1c929cd1dff850852c297b3e8612df25bd27b587c2f42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T15:04:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d09f0ff6fbdc14dfc60551c4bb2db225cda38354193e9e791335d01d9eb8cece\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T15:04:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T15:04:51Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.199093 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.208473 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-528sf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8edc04b1-dbb4-4d18-a110-e925d19ac049\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hssdd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T15:05:11Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-528sf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.231692 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.240228 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.240303 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 15:05:12 crc kubenswrapper[4869]: E1001 15:05:12.240493 4869 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 01 15:05:12 crc kubenswrapper[4869]: E1001 15:05:12.240514 4869 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 01 15:05:12 crc kubenswrapper[4869]: E1001 15:05:12.240527 4869 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 01 15:05:12 crc kubenswrapper[4869]: E1001 15:05:12.240587 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-01 15:05:13.240568446 +0000 UTC m=+22.387411562 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 01 15:05:12 crc kubenswrapper[4869]: E1001 15:05:12.240650 4869 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 01 15:05:12 crc kubenswrapper[4869]: E1001 15:05:12.240659 4869 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 01 15:05:12 crc kubenswrapper[4869]: E1001 15:05:12.240671 4869 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 01 15:05:12 crc kubenswrapper[4869]: E1001 15:05:12.240693 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-01 15:05:13.240685599 +0000 UTC m=+22.387528715 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.249005 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j98s2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"69635c7a-0025-4ea2-a1b6-fc7776c2be11\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:12Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b7rv5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T15:05:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j98s2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.263421 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-528sf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8edc04b1-dbb4-4d18-a110-e925d19ac049\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hssdd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T15:05:11Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-528sf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.275396 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.290023 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j98s2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"69635c7a-0025-4ea2-a1b6-fc7776c2be11\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:12Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b7rv5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T15:05:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j98s2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.305304 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.323607 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.333115 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.347856 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.351460 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-j98s2" Oct 01 15:05:12 crc kubenswrapper[4869]: W1001 15:05:12.360837 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod69635c7a_0025_4ea2_a1b6_fc7776c2be11.slice/crio-d4f35ec26468b0dba273db66efc6907dadadd87b8cf30709f006eed18570e98e WatchSource:0}: Error finding container d4f35ec26468b0dba273db66efc6907dadadd87b8cf30709f006eed18570e98e: Status 404 returned error can't find the container with id d4f35ec26468b0dba273db66efc6907dadadd87b8cf30709f006eed18570e98e Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.362539 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5hbq7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c01e592-2d8d-4773-a8b3-f6efe676f57f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:12Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-65476\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T15:05:12Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5hbq7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.384242 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-5hbq7" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.384354 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5794379e-0bd9-4b1f-89eb-d1075fd901e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:04:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:04:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:04:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:04:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:04:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ccef45b15da9c0dc92cfbebfda5fddde05397f6a23946a1c7e9df12b3c275bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T15:04:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c5fc6afde533fd1160a9b445d9835126d2c2b0fb019b4b9d13269e91c5ad1e53\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T15:04:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec14a0b8c122b3f70309112b68f07cdd77057e977ceb5f5a061a1892a93e28d6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T15:04:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0de9f530ff8a0069f4718fb3b820fef8a17fe5ee04f641326d42503d5b39956a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0de9f530ff8a0069f4718fb3b820fef8a17fe5ee04f641326d42503d5b39956a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-01
T15:05:10Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1001 15:05:05.118072 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1001 15:05:05.120651 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2773387964/tls.crt::/tmp/serving-cert-2773387964/tls.key\\\\\\\"\\\\nI1001 15:05:10.813323 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1001 15:05:10.819195 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1001 15:05:10.819374 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1001 15:05:10.819467 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1001 15:05:10.819569 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1001 15:05:10.834141 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1001 15:05:10.834193 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1001 15:05:10.834212 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1001 15:05:10.834221 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1001 15:05:10.834227 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1001 15:05:10.834234 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1001 15:05:10.834240 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1001 15:05:10.834323 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1001 15:05:10.839565 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T15:04:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cddd04fc8db0c24ce9cefef134d143e9927ec632a6829a402069e740b42a429\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T15:04:54Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1119a5821d140830d19c6272faade87f2ef99f2436d1011adad88b5b86d964b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1119a5821d140830d19c6272faade87f2ef99f2436d1011adad88b5b86d964b0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T15:04:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T15:04:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T15:04:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 15:05:12 crc kubenswrapper[4869]: W1001 15:05:12.397973 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0c01e592_2d8d_4773_a8b3_f6efe676f57f.slice/crio-d423aa627c1fc6fc1906957915ec8e6c2bad5b78be4d66dacbf2d8451c6897b5 WatchSource:0}: Error finding container d423aa627c1fc6fc1906957915ec8e6c2bad5b78be4d66dacbf2d8451c6897b5: Status 404 returned error can't find the container with id d423aa627c1fc6fc1906957915ec8e6c2bad5b78be4d66dacbf2d8451c6897b5 Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.412269 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4b64f7f-0b03-4f47-965b-9fde048b735c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m289h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m289h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T15:05:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-c86m8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.437732 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.451055 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-27gqg"] Oct 01 15:05:12 crc kubenswrapper[4869]: 
I1001 15:05:12.451999 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-27gqg" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.458879 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-ch8sh"] Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.459953 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-ch8sh" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.468542 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.482007 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.482523 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.482550 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.482913 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.483071 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.483333 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.483538 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.487182 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.487344 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.532846 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.542864 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/0dc0f13b-b0ed-4694-97f1-bdaeb0a99d53-cni-binary-copy\") pod \"multus-additional-cni-plugins-ch8sh\" (UID: \"0dc0f13b-b0ed-4694-97f1-bdaeb0a99d53\") " pod="openshift-multus/multus-additional-cni-plugins-ch8sh" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.542910 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: 
\"kubernetes.io/host-path/ebbefc55-bef9-4a03-a065-321bff3a75b4-host-cni-bin\") pod \"ovnkube-node-27gqg\" (UID: \"ebbefc55-bef9-4a03-a065-321bff3a75b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-27gqg" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.542928 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/ebbefc55-bef9-4a03-a065-321bff3a75b4-host-run-netns\") pod \"ovnkube-node-27gqg\" (UID: \"ebbefc55-bef9-4a03-a065-321bff3a75b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-27gqg" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.542945 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/0dc0f13b-b0ed-4694-97f1-bdaeb0a99d53-tuning-conf-dir\") pod \"multus-additional-cni-plugins-ch8sh\" (UID: \"0dc0f13b-b0ed-4694-97f1-bdaeb0a99d53\") " pod="openshift-multus/multus-additional-cni-plugins-ch8sh" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.542967 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ebbefc55-bef9-4a03-a065-321bff3a75b4-env-overrides\") pod \"ovnkube-node-27gqg\" (UID: \"ebbefc55-bef9-4a03-a065-321bff3a75b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-27gqg" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.542986 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/ebbefc55-bef9-4a03-a065-321bff3a75b4-run-systemd\") pod \"ovnkube-node-27gqg\" (UID: \"ebbefc55-bef9-4a03-a065-321bff3a75b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-27gqg" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.543001 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/ebbefc55-bef9-4a03-a065-321bff3a75b4-run-ovn\") pod \"ovnkube-node-27gqg\" (UID: \"ebbefc55-bef9-4a03-a065-321bff3a75b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-27gqg" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.543025 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/ebbefc55-bef9-4a03-a065-321bff3a75b4-node-log\") pod \"ovnkube-node-27gqg\" (UID: \"ebbefc55-bef9-4a03-a065-321bff3a75b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-27gqg" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.543038 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/0dc0f13b-b0ed-4694-97f1-bdaeb0a99d53-cnibin\") pod \"multus-additional-cni-plugins-ch8sh\" (UID: \"0dc0f13b-b0ed-4694-97f1-bdaeb0a99d53\") " pod="openshift-multus/multus-additional-cni-plugins-ch8sh" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.543051 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/0dc0f13b-b0ed-4694-97f1-bdaeb0a99d53-os-release\") pod \"multus-additional-cni-plugins-ch8sh\" (UID: \"0dc0f13b-b0ed-4694-97f1-bdaeb0a99d53\") " pod="openshift-multus/multus-additional-cni-plugins-ch8sh" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.543065 4869 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/ebbefc55-bef9-4a03-a065-321bff3a75b4-host-run-ovn-kubernetes\") pod \"ovnkube-node-27gqg\" (UID: \"ebbefc55-bef9-4a03-a065-321bff3a75b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-27gqg" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.543080 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/ebbefc55-bef9-4a03-a065-321bff3a75b4-systemd-units\") pod \"ovnkube-node-27gqg\" (UID: \"ebbefc55-bef9-4a03-a065-321bff3a75b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-27gqg" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.543095 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ebbefc55-bef9-4a03-a065-321bff3a75b4-var-lib-openvswitch\") pod \"ovnkube-node-27gqg\" (UID: \"ebbefc55-bef9-4a03-a065-321bff3a75b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-27gqg" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.543110 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/ebbefc55-bef9-4a03-a065-321bff3a75b4-host-slash\") pod \"ovnkube-node-27gqg\" (UID: \"ebbefc55-bef9-4a03-a065-321bff3a75b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-27gqg" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.543123 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ebbefc55-bef9-4a03-a065-321bff3a75b4-etc-openvswitch\") pod \"ovnkube-node-27gqg\" (UID: \"ebbefc55-bef9-4a03-a065-321bff3a75b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-27gqg" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.543135 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/ebbefc55-bef9-4a03-a065-321bff3a75b4-ovn-node-metrics-cert\") pod \"ovnkube-node-27gqg\" (UID: \"ebbefc55-bef9-4a03-a065-321bff3a75b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-27gqg" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.543149 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/ebbefc55-bef9-4a03-a065-321bff3a75b4-ovnkube-script-lib\") pod \"ovnkube-node-27gqg\" (UID: \"ebbefc55-bef9-4a03-a065-321bff3a75b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-27gqg" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.543163 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sxdtj\" (UniqueName: \"kubernetes.io/projected/0dc0f13b-b0ed-4694-97f1-bdaeb0a99d53-kube-api-access-sxdtj\") pod \"multus-additional-cni-plugins-ch8sh\" (UID: \"0dc0f13b-b0ed-4694-97f1-bdaeb0a99d53\") " pod="openshift-multus/multus-additional-cni-plugins-ch8sh" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.543181 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/ebbefc55-bef9-4a03-a065-321bff3a75b4-host-var-lib-cni-networks-ovn-kubernetes\") pod 
\"ovnkube-node-27gqg\" (UID: \"ebbefc55-bef9-4a03-a065-321bff3a75b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-27gqg" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.543198 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/ebbefc55-bef9-4a03-a065-321bff3a75b4-ovnkube-config\") pod \"ovnkube-node-27gqg\" (UID: \"ebbefc55-bef9-4a03-a065-321bff3a75b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-27gqg" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.543214 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/0dc0f13b-b0ed-4694-97f1-bdaeb0a99d53-system-cni-dir\") pod \"multus-additional-cni-plugins-ch8sh\" (UID: \"0dc0f13b-b0ed-4694-97f1-bdaeb0a99d53\") " pod="openshift-multus/multus-additional-cni-plugins-ch8sh" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.543233 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/ebbefc55-bef9-4a03-a065-321bff3a75b4-log-socket\") pod \"ovnkube-node-27gqg\" (UID: \"ebbefc55-bef9-4a03-a065-321bff3a75b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-27gqg" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.543247 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/0dc0f13b-b0ed-4694-97f1-bdaeb0a99d53-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-ch8sh\" (UID: \"0dc0f13b-b0ed-4694-97f1-bdaeb0a99d53\") " pod="openshift-multus/multus-additional-cni-plugins-ch8sh" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.543276 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v9mfd\" (UniqueName: \"kubernetes.io/projected/ebbefc55-bef9-4a03-a065-321bff3a75b4-kube-api-access-v9mfd\") pod \"ovnkube-node-27gqg\" (UID: \"ebbefc55-bef9-4a03-a065-321bff3a75b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-27gqg" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.543294 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/ebbefc55-bef9-4a03-a065-321bff3a75b4-host-kubelet\") pod \"ovnkube-node-27gqg\" (UID: \"ebbefc55-bef9-4a03-a065-321bff3a75b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-27gqg" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.543307 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ebbefc55-bef9-4a03-a065-321bff3a75b4-run-openvswitch\") pod \"ovnkube-node-27gqg\" (UID: \"ebbefc55-bef9-4a03-a065-321bff3a75b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-27gqg" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.543321 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/ebbefc55-bef9-4a03-a065-321bff3a75b4-host-cni-netd\") pod \"ovnkube-node-27gqg\" (UID: \"ebbefc55-bef9-4a03-a065-321bff3a75b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-27gqg" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.546024 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-m289h\" (UniqueName: \"kubernetes.io/projected/a4b64f7f-0b03-4f47-965b-9fde048b735c-kube-api-access-m289h\") pod \"machine-config-daemon-c86m8\" (UID: \"a4b64f7f-0b03-4f47-965b-9fde048b735c\") " pod="openshift-machine-config-operator/machine-config-daemon-c86m8" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.553298 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"51d3e7b3-811a-42d8-a711-abf28a181753\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:04:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:04:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:04:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd32818a8ac16855cf81d75085471907d2579ba8bc8ab434464787d21e630c86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T15:04:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d0beef438954056c20003a1a29c1356f1fe50e73db01411b4cf082a2e9b2a7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T15:04:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d08b3c48bda55cb82da1c929cd1dff850852c297b3e8612df25bd27b587c2f42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount
\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T15:04:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d09f0ff6fbdc14dfc60551c4bb2db225cda38354193e9e791335d01d9eb8cece\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T15:04:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T15:04:51Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.560661 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.564703 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/a4b64f7f-0b03-4f47-965b-9fde048b735c-mcd-auth-proxy-config\") pod \"machine-config-daemon-c86m8\" (UID: \"a4b64f7f-0b03-4f47-965b-9fde048b735c\") " pod="openshift-machine-config-operator/machine-config-daemon-c86m8" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.605470 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.643531 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/ebbefc55-bef9-4a03-a065-321bff3a75b4-host-run-ovn-kubernetes\") pod \"ovnkube-node-27gqg\" (UID: \"ebbefc55-bef9-4a03-a065-321bff3a75b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-27gqg" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.643563 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/ebbefc55-bef9-4a03-a065-321bff3a75b4-systemd-units\") pod \"ovnkube-node-27gqg\" (UID: \"ebbefc55-bef9-4a03-a065-321bff3a75b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-27gqg" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.643580 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ebbefc55-bef9-4a03-a065-321bff3a75b4-var-lib-openvswitch\") pod \"ovnkube-node-27gqg\" (UID: \"ebbefc55-bef9-4a03-a065-321bff3a75b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-27gqg" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.643596 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/ebbefc55-bef9-4a03-a065-321bff3a75b4-host-slash\") pod \"ovnkube-node-27gqg\" (UID: \"ebbefc55-bef9-4a03-a065-321bff3a75b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-27gqg" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.643613 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ebbefc55-bef9-4a03-a065-321bff3a75b4-etc-openvswitch\") pod \"ovnkube-node-27gqg\" (UID: \"ebbefc55-bef9-4a03-a065-321bff3a75b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-27gqg" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.643632 4869 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/ebbefc55-bef9-4a03-a065-321bff3a75b4-ovn-node-metrics-cert\") pod \"ovnkube-node-27gqg\" (UID: \"ebbefc55-bef9-4a03-a065-321bff3a75b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-27gqg" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.643648 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/ebbefc55-bef9-4a03-a065-321bff3a75b4-ovnkube-script-lib\") pod \"ovnkube-node-27gqg\" (UID: \"ebbefc55-bef9-4a03-a065-321bff3a75b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-27gqg" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.643665 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sxdtj\" (UniqueName: \"kubernetes.io/projected/0dc0f13b-b0ed-4694-97f1-bdaeb0a99d53-kube-api-access-sxdtj\") pod \"multus-additional-cni-plugins-ch8sh\" (UID: \"0dc0f13b-b0ed-4694-97f1-bdaeb0a99d53\") " pod="openshift-multus/multus-additional-cni-plugins-ch8sh" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.643666 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/ebbefc55-bef9-4a03-a065-321bff3a75b4-systemd-units\") pod \"ovnkube-node-27gqg\" (UID: \"ebbefc55-bef9-4a03-a065-321bff3a75b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-27gqg" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.643700 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/ebbefc55-bef9-4a03-a065-321bff3a75b4-host-slash\") pod \"ovnkube-node-27gqg\" (UID: \"ebbefc55-bef9-4a03-a065-321bff3a75b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-27gqg" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.643719 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/ebbefc55-bef9-4a03-a065-321bff3a75b4-log-socket\") pod \"ovnkube-node-27gqg\" (UID: \"ebbefc55-bef9-4a03-a065-321bff3a75b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-27gqg" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.643683 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/ebbefc55-bef9-4a03-a065-321bff3a75b4-log-socket\") pod \"ovnkube-node-27gqg\" (UID: \"ebbefc55-bef9-4a03-a065-321bff3a75b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-27gqg" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.643672 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ebbefc55-bef9-4a03-a065-321bff3a75b4-var-lib-openvswitch\") pod \"ovnkube-node-27gqg\" (UID: \"ebbefc55-bef9-4a03-a065-321bff3a75b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-27gqg" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.643753 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/ebbefc55-bef9-4a03-a065-321bff3a75b4-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-27gqg\" (UID: \"ebbefc55-bef9-4a03-a065-321bff3a75b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-27gqg" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.643771 4869 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/ebbefc55-bef9-4a03-a065-321bff3a75b4-ovnkube-config\") pod \"ovnkube-node-27gqg\" (UID: \"ebbefc55-bef9-4a03-a065-321bff3a75b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-27gqg" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.643762 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ebbefc55-bef9-4a03-a065-321bff3a75b4-etc-openvswitch\") pod \"ovnkube-node-27gqg\" (UID: \"ebbefc55-bef9-4a03-a065-321bff3a75b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-27gqg" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.643805 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/0dc0f13b-b0ed-4694-97f1-bdaeb0a99d53-system-cni-dir\") pod \"multus-additional-cni-plugins-ch8sh\" (UID: \"0dc0f13b-b0ed-4694-97f1-bdaeb0a99d53\") " pod="openshift-multus/multus-additional-cni-plugins-ch8sh" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.643672 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/ebbefc55-bef9-4a03-a065-321bff3a75b4-host-run-ovn-kubernetes\") pod \"ovnkube-node-27gqg\" (UID: \"ebbefc55-bef9-4a03-a065-321bff3a75b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-27gqg" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.643786 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/0dc0f13b-b0ed-4694-97f1-bdaeb0a99d53-system-cni-dir\") pod \"multus-additional-cni-plugins-ch8sh\" (UID: \"0dc0f13b-b0ed-4694-97f1-bdaeb0a99d53\") " pod="openshift-multus/multus-additional-cni-plugins-ch8sh" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.643834 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/ebbefc55-bef9-4a03-a065-321bff3a75b4-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-27gqg\" (UID: \"ebbefc55-bef9-4a03-a065-321bff3a75b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-27gqg" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.643880 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/0dc0f13b-b0ed-4694-97f1-bdaeb0a99d53-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-ch8sh\" (UID: \"0dc0f13b-b0ed-4694-97f1-bdaeb0a99d53\") " pod="openshift-multus/multus-additional-cni-plugins-ch8sh" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.643904 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/ebbefc55-bef9-4a03-a065-321bff3a75b4-host-kubelet\") pod \"ovnkube-node-27gqg\" (UID: \"ebbefc55-bef9-4a03-a065-321bff3a75b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-27gqg" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.643923 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ebbefc55-bef9-4a03-a065-321bff3a75b4-run-openvswitch\") pod \"ovnkube-node-27gqg\" (UID: \"ebbefc55-bef9-4a03-a065-321bff3a75b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-27gqg" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.643939 4869 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/ebbefc55-bef9-4a03-a065-321bff3a75b4-host-cni-netd\") pod \"ovnkube-node-27gqg\" (UID: \"ebbefc55-bef9-4a03-a065-321bff3a75b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-27gqg" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.643956 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v9mfd\" (UniqueName: \"kubernetes.io/projected/ebbefc55-bef9-4a03-a065-321bff3a75b4-kube-api-access-v9mfd\") pod \"ovnkube-node-27gqg\" (UID: \"ebbefc55-bef9-4a03-a065-321bff3a75b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-27gqg" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.643977 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/0dc0f13b-b0ed-4694-97f1-bdaeb0a99d53-cni-binary-copy\") pod \"multus-additional-cni-plugins-ch8sh\" (UID: \"0dc0f13b-b0ed-4694-97f1-bdaeb0a99d53\") " pod="openshift-multus/multus-additional-cni-plugins-ch8sh" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.644005 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/ebbefc55-bef9-4a03-a065-321bff3a75b4-host-cni-bin\") pod \"ovnkube-node-27gqg\" (UID: \"ebbefc55-bef9-4a03-a065-321bff3a75b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-27gqg" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.644023 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/ebbefc55-bef9-4a03-a065-321bff3a75b4-host-run-netns\") pod \"ovnkube-node-27gqg\" (UID: \"ebbefc55-bef9-4a03-a065-321bff3a75b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-27gqg" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.644055 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/0dc0f13b-b0ed-4694-97f1-bdaeb0a99d53-tuning-conf-dir\") pod \"multus-additional-cni-plugins-ch8sh\" (UID: \"0dc0f13b-b0ed-4694-97f1-bdaeb0a99d53\") " pod="openshift-multus/multus-additional-cni-plugins-ch8sh" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.644078 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/ebbefc55-bef9-4a03-a065-321bff3a75b4-run-systemd\") pod \"ovnkube-node-27gqg\" (UID: \"ebbefc55-bef9-4a03-a065-321bff3a75b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-27gqg" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.644092 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/ebbefc55-bef9-4a03-a065-321bff3a75b4-run-ovn\") pod \"ovnkube-node-27gqg\" (UID: \"ebbefc55-bef9-4a03-a065-321bff3a75b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-27gqg" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.644109 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ebbefc55-bef9-4a03-a065-321bff3a75b4-env-overrides\") pod \"ovnkube-node-27gqg\" (UID: \"ebbefc55-bef9-4a03-a065-321bff3a75b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-27gqg" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.644124 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"node-log\" (UniqueName: \"kubernetes.io/host-path/ebbefc55-bef9-4a03-a065-321bff3a75b4-node-log\") pod \"ovnkube-node-27gqg\" (UID: \"ebbefc55-bef9-4a03-a065-321bff3a75b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-27gqg" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.644139 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/0dc0f13b-b0ed-4694-97f1-bdaeb0a99d53-cnibin\") pod \"multus-additional-cni-plugins-ch8sh\" (UID: \"0dc0f13b-b0ed-4694-97f1-bdaeb0a99d53\") " pod="openshift-multus/multus-additional-cni-plugins-ch8sh" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.644154 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/0dc0f13b-b0ed-4694-97f1-bdaeb0a99d53-os-release\") pod \"multus-additional-cni-plugins-ch8sh\" (UID: \"0dc0f13b-b0ed-4694-97f1-bdaeb0a99d53\") " pod="openshift-multus/multus-additional-cni-plugins-ch8sh" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.644229 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/0dc0f13b-b0ed-4694-97f1-bdaeb0a99d53-os-release\") pod \"multus-additional-cni-plugins-ch8sh\" (UID: \"0dc0f13b-b0ed-4694-97f1-bdaeb0a99d53\") " pod="openshift-multus/multus-additional-cni-plugins-ch8sh" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.644399 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ebbefc55-bef9-4a03-a065-321bff3a75b4-run-openvswitch\") pod \"ovnkube-node-27gqg\" (UID: \"ebbefc55-bef9-4a03-a065-321bff3a75b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-27gqg" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.644422 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/ebbefc55-bef9-4a03-a065-321bff3a75b4-run-systemd\") pod \"ovnkube-node-27gqg\" (UID: \"ebbefc55-bef9-4a03-a065-321bff3a75b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-27gqg" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.644430 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/ebbefc55-bef9-4a03-a065-321bff3a75b4-host-kubelet\") pod \"ovnkube-node-27gqg\" (UID: \"ebbefc55-bef9-4a03-a065-321bff3a75b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-27gqg" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.644459 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/ebbefc55-bef9-4a03-a065-321bff3a75b4-host-run-netns\") pod \"ovnkube-node-27gqg\" (UID: \"ebbefc55-bef9-4a03-a065-321bff3a75b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-27gqg" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.644460 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/ebbefc55-bef9-4a03-a065-321bff3a75b4-host-cni-bin\") pod \"ovnkube-node-27gqg\" (UID: \"ebbefc55-bef9-4a03-a065-321bff3a75b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-27gqg" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.644483 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/ebbefc55-bef9-4a03-a065-321bff3a75b4-host-cni-netd\") pod 
\"ovnkube-node-27gqg\" (UID: \"ebbefc55-bef9-4a03-a065-321bff3a75b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-27gqg" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.644531 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/0dc0f13b-b0ed-4694-97f1-bdaeb0a99d53-tuning-conf-dir\") pod \"multus-additional-cni-plugins-ch8sh\" (UID: \"0dc0f13b-b0ed-4694-97f1-bdaeb0a99d53\") " pod="openshift-multus/multus-additional-cni-plugins-ch8sh" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.644563 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/ebbefc55-bef9-4a03-a065-321bff3a75b4-run-ovn\") pod \"ovnkube-node-27gqg\" (UID: \"ebbefc55-bef9-4a03-a065-321bff3a75b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-27gqg" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.644566 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/ebbefc55-bef9-4a03-a065-321bff3a75b4-ovnkube-script-lib\") pod \"ovnkube-node-27gqg\" (UID: \"ebbefc55-bef9-4a03-a065-321bff3a75b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-27gqg" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.644586 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/ebbefc55-bef9-4a03-a065-321bff3a75b4-node-log\") pod \"ovnkube-node-27gqg\" (UID: \"ebbefc55-bef9-4a03-a065-321bff3a75b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-27gqg" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.644627 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/0dc0f13b-b0ed-4694-97f1-bdaeb0a99d53-cnibin\") pod \"multus-additional-cni-plugins-ch8sh\" (UID: \"0dc0f13b-b0ed-4694-97f1-bdaeb0a99d53\") " pod="openshift-multus/multus-additional-cni-plugins-ch8sh" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.644649 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/0dc0f13b-b0ed-4694-97f1-bdaeb0a99d53-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-ch8sh\" (UID: \"0dc0f13b-b0ed-4694-97f1-bdaeb0a99d53\") " pod="openshift-multus/multus-additional-cni-plugins-ch8sh" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.644854 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ebbefc55-bef9-4a03-a065-321bff3a75b4-env-overrides\") pod \"ovnkube-node-27gqg\" (UID: \"ebbefc55-bef9-4a03-a065-321bff3a75b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-27gqg" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.644992 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/ebbefc55-bef9-4a03-a065-321bff3a75b4-ovnkube-config\") pod \"ovnkube-node-27gqg\" (UID: \"ebbefc55-bef9-4a03-a065-321bff3a75b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-27gqg" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.645241 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/0dc0f13b-b0ed-4694-97f1-bdaeb0a99d53-cni-binary-copy\") pod \"multus-additional-cni-plugins-ch8sh\" (UID: \"0dc0f13b-b0ed-4694-97f1-bdaeb0a99d53\") " 
pod="openshift-multus/multus-additional-cni-plugins-ch8sh" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.646549 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/ebbefc55-bef9-4a03-a065-321bff3a75b4-ovn-node-metrics-cert\") pod \"ovnkube-node-27gqg\" (UID: \"ebbefc55-bef9-4a03-a065-321bff3a75b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-27gqg" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.647801 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.675675 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sxdtj\" (UniqueName: \"kubernetes.io/projected/0dc0f13b-b0ed-4694-97f1-bdaeb0a99d53-kube-api-access-sxdtj\") pod \"multus-additional-cni-plugins-ch8sh\" (UID: \"0dc0f13b-b0ed-4694-97f1-bdaeb0a99d53\") " pod="openshift-multus/multus-additional-cni-plugins-ch8sh" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.698611 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v9mfd\" (UniqueName: \"kubernetes.io/projected/ebbefc55-bef9-4a03-a065-321bff3a75b4-kube-api-access-v9mfd\") pod \"ovnkube-node-27gqg\" (UID: \"ebbefc55-bef9-4a03-a065-321bff3a75b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-27gqg" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.720178 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.727201 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" Oct 01 15:05:12 crc kubenswrapper[4869]: W1001 15:05:12.737331 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda4b64f7f_0b03_4f47_965b_9fde048b735c.slice/crio-978d7f4f27536bda84814cacb5123965fd3951ce083f4e55c50e634b8187f2cd WatchSource:0}: Error finding container 978d7f4f27536bda84814cacb5123965fd3951ce083f4e55c50e634b8187f2cd: Status 404 returned error can't find the container with id 978d7f4f27536bda84814cacb5123965fd3951ce083f4e55c50e634b8187f2cd Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.738750 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"8b50d32a38b3800ef5fcfd0c75ec15b4cfa750fe67a23ddc7bf72e79b19d00f6"} Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.738790 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"9a294d127807682312a6c6828f844310a8a022ebce811e51a53c2a5e60fc6bec"} Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.741066 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"c11717c4dceac8e150d3f1d42be41e23c81b89f4410fb7dd8393fca33b6965ac"} Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.741093 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"bc98b8d7b68084c32805c5f6d050b49a567ea3fc3eef9825909cc3058442e45e"} Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.741104 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"88db8e553b2edf384a8f19e6da88dc05f91abaeffe8679c9be2b6a26106653b8"} Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.744072 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"fc43848025e4ec8362b1ba825fb555028bd74973a27291635502bfbd093b1efc"} Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.745909 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.747303 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"0f5fc3149c69187517c3ad92e33aa91e1d567a78b9770be35c10267467a68d37"} Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.747782 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.748831 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-5hbq7" 
event={"ID":"0c01e592-2d8d-4773-a8b3-f6efe676f57f","Type":"ContainerStarted","Data":"db5afaa678c9d0b0d7cf16284426cfcdd2d3534ab1a4978945d7bdfbfa3a6714"} Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.748851 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-5hbq7" event={"ID":"0c01e592-2d8d-4773-a8b3-f6efe676f57f","Type":"ContainerStarted","Data":"d423aa627c1fc6fc1906957915ec8e6c2bad5b78be4d66dacbf2d8451c6897b5"} Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.750288 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-j98s2" event={"ID":"69635c7a-0025-4ea2-a1b6-fc7776c2be11","Type":"ContainerStarted","Data":"a96142295c608f4cce284b8e003ed2a5c2954ce0542b28d04f3f63710c6c65e0"} Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.750308 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-j98s2" event={"ID":"69635c7a-0025-4ea2-a1b6-fc7776c2be11","Type":"ContainerStarted","Data":"d4f35ec26468b0dba273db66efc6907dadadd87b8cf30709f006eed18570e98e"} Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.752804 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T15:05:12Z is after 2025-08-24T17:21:41Z" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.753560 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-528sf" event={"ID":"8edc04b1-dbb4-4d18-a110-e925d19ac049","Type":"ContainerStarted","Data":"fbd802c770f8ab2b7dd8fc652bc10e5d67cb9176aa35b4d135ad4d01c6cef158"} Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.753583 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-528sf" event={"ID":"8edc04b1-dbb4-4d18-a110-e925d19ac049","Type":"ContainerStarted","Data":"381bf721e04166967d4404a8100276c62e62bba5bbedb1134f0a48ab2e68c24c"} Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.764483 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-27gqg" Oct 01 15:05:12 crc kubenswrapper[4869]: W1001 15:05:12.778504 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podebbefc55_bef9_4a03_a065_321bff3a75b4.slice/crio-494cc5f315d900d7ca2031138d77f2eac5461180d1368ed046cc0e0509e13a78 WatchSource:0}: Error finding container 494cc5f315d900d7ca2031138d77f2eac5461180d1368ed046cc0e0509e13a78: Status 404 returned error can't find the container with id 494cc5f315d900d7ca2031138d77f2eac5461180d1368ed046cc0e0509e13a78 Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.787341 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-ch8sh" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.789769 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T15:05:12Z is after 2025-08-24T17:21:41Z" Oct 01 15:05:12 crc kubenswrapper[4869]: W1001 15:05:12.805989 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0dc0f13b_b0ed_4694_97f1_bdaeb0a99d53.slice/crio-453655c0de4f7776d0ea7e6dc5fb2a6f4a56b0f797a48a77317c1af7f487e4ad WatchSource:0}: Error finding container 453655c0de4f7776d0ea7e6dc5fb2a6f4a56b0f797a48a77317c1af7f487e4ad: Status 404 returned error can't find the container with id 453655c0de4f7776d0ea7e6dc5fb2a6f4a56b0f797a48a77317c1af7f487e4ad Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.830280 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5hbq7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c01e592-2d8d-4773-a8b3-f6efe676f57f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:12Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:12Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-65476\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T15:05:12Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5hbq7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T15:05:12Z is after 2025-08-24T17:21:41Z" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.879028 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-ch8sh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dc0f13b-b0ed-4694-97f1-bdaeb0a99d53\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:12Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:12Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:12Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sxdtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sxdtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sxdtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plu
gin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sxdtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sxdtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sxdtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sxdtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T15:05:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-ch8sh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-10-01T15:05:12Z is after 2025-08-24T17:21:41Z" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.911421 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5794379e-0bd9-4b1f-89eb-d1075fd901e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:04:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:04:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:04:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:04:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:04:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ccef45b15da9c0dc92cfbebfda5fddde05397f6a23946a1c7e9df12b3c275bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T15:04:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c5fc6afde533fd1160a9b445d9835126d2c2b0fb019b4b9d13269e91c5ad1e53\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T15:04:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec14a0b8c122b3f70309112b68f07cdd77057e977ceb5f5a061a1892a93e28d6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-
apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T15:04:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0de9f530ff8a0069f4718fb3b820fef8a17fe5ee04f641326d42503d5b39956a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0de9f530ff8a0069f4718fb3b820fef8a17fe5ee04f641326d42503d5b39956a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-01T15:05:10Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1001 15:05:05.118072 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1001 15:05:05.120651 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2773387964/tls.crt::/tmp/serving-cert-2773387964/tls.key\\\\\\\"\\\\nI1001 15:05:10.813323 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1001 15:05:10.819195 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1001 15:05:10.819374 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1001 15:05:10.819467 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1001 15:05:10.819569 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1001 15:05:10.834141 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1001 15:05:10.834193 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1001 15:05:10.834212 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1001 15:05:10.834221 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1001 15:05:10.834227 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1001 15:05:10.834234 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1001 15:05:10.834240 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1001 15:05:10.834323 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1001 15:05:10.839565 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T15:04:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cddd04fc8db0c24ce9cefef134d143e9927ec632a6829a402069e740b42a429\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T15:04:54Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1119a5821d140830d19c6272faade87f2ef99f2436d1011adad88b5b86d964b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1119a5821d140830d19c6272faade87f2ef99f2436d1011adad88b5b86d964b0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T15:04:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T15:04:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T15:04:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T15:05:12Z is after 2025-08-24T17:21:41Z" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.950850 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4b64f7f-0b03-4f47-965b-9fde048b735c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m289h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m289h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T15:05:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-c86m8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T15:05:12Z is after 2025-08-24T17:21:41Z" Oct 01 15:05:12 crc kubenswrapper[4869]: I1001 15:05:12.991999 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"51d3e7b3-811a-42d8-a711-abf28a181753\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:04:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:04:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:04:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd32818a8ac16855cf81d75085471907d2579ba8bc8ab434464787d21e630c86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T15:04:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d0beef438954056c20003a1a29c1356f1fe50e73db01411b4cf082a2e9b2a7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T15:04:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d08b3c48bda55cb82da1c929cd1dff850852c297b3e8612df25bd27b587c2f42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T15:04:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d09f0ff6fbdc14dfc60551c4bb2db225cda38354193e9e791335d01d9eb8cece\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T15:04:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T15:04:51Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T15:05:12Z is after 2025-08-24T17:21:41Z" Oct 01 15:05:13 crc kubenswrapper[4869]: I1001 15:05:13.030134 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T15:05:13Z is after 2025-08-24T17:21:41Z" Oct 01 15:05:13 crc kubenswrapper[4869]: I1001 15:05:13.112949 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-27gqg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ebbefc55-bef9-4a03-a065-321bff3a75b4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:12Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v9mfd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v9mfd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v9mfd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":
\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v9mfd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v9mfd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v9mfd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v9mfd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v9mfd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v9mfd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T15:05:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-27gqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T15:05:13Z is after 2025-08-24T17:21:41Z" Oct 01 15:05:13 crc kubenswrapper[4869]: 
I1001 15:05:13.127801 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-528sf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8edc04b1-dbb4-4d18-a110-e925d19ac049\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hssdd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T15:05:11Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-528sf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T15:05:13Z is after 2025-08-24T17:21:41Z" Oct 01 15:05:13 crc kubenswrapper[4869]: I1001 15:05:13.150573 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 15:05:13 crc kubenswrapper[4869]: I1001 15:05:13.151064 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 15:05:13 crc kubenswrapper[4869]: I1001 15:05:13.151094 4869 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 15:05:13 crc kubenswrapper[4869]: E1001 15:05:13.151239 4869 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 01 15:05:13 crc kubenswrapper[4869]: E1001 15:05:13.151308 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-01 15:05:15.151294383 +0000 UTC m=+24.298137499 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 01 15:05:13 crc kubenswrapper[4869]: E1001 15:05:13.151582 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 15:05:15.151520459 +0000 UTC m=+24.298363575 (durationBeforeRetry 2s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:13 crc kubenswrapper[4869]: E1001 15:05:13.151741 4869 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 01 15:05:13 crc kubenswrapper[4869]: E1001 15:05:13.151818 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-01 15:05:15.151808106 +0000 UTC m=+24.298651222 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 01 15:05:13 crc kubenswrapper[4869]: I1001 15:05:13.154515 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T15:05:13Z is after 2025-08-24T17:21:41Z" Oct 01 15:05:13 crc kubenswrapper[4869]: I1001 15:05:13.187487 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j98s2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"69635c7a-0025-4ea2-a1b6-fc7776c2be11\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:12Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b7rv5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T15:05:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j98s2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T15:05:13Z is after 2025-08-24T17:21:41Z" Oct 01 15:05:13 crc kubenswrapper[4869]: I1001 15:05:13.230462 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"51d3e7b3-811a-42d8-a711-abf28a181753\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:04:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:04:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:04:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd32818a8ac16855cf81d75085471907d2579ba8bc8ab434464787d21e630c86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T15:04:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d0beef438954056c20003a1a29c1356f1fe50e73db01411b4cf082a2e9b2a7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T15:04:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d08b3c48bda55cb82da1c929cd1dff850852c297b3e8612df25bd27b587c2f42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T15:04:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d09f0ff6fbdc14dfc60551c4bb2db225cda38354193e9e791335d01d9eb8cece\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T15:04:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T15:04:51Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T15:05:13Z is after 2025-08-24T17:21:41Z" Oct 01 15:05:13 crc kubenswrapper[4869]: I1001 15:05:13.252632 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 15:05:13 crc kubenswrapper[4869]: I1001 15:05:13.252681 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 15:05:13 crc kubenswrapper[4869]: E1001 15:05:13.252824 4869 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 01 15:05:13 crc kubenswrapper[4869]: E1001 15:05:13.252851 4869 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 01 15:05:13 crc kubenswrapper[4869]: E1001 15:05:13.252824 4869 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 01 15:05:13 crc kubenswrapper[4869]: E1001 15:05:13.252866 4869 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 01 15:05:13 crc kubenswrapper[4869]: E1001 15:05:13.252876 4869 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 01 
15:05:13 crc kubenswrapper[4869]: E1001 15:05:13.252887 4869 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 01 15:05:13 crc kubenswrapper[4869]: E1001 15:05:13.252926 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-01 15:05:15.252907829 +0000 UTC m=+24.399750945 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 01 15:05:13 crc kubenswrapper[4869]: E1001 15:05:13.252941 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-01 15:05:15.25293524 +0000 UTC m=+24.399778346 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 01 15:05:13 crc kubenswrapper[4869]: I1001 15:05:13.266728 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c11717c4dceac8e150d3f1d42be41e23c81b89f4410fb7dd8393fca33b6965ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T15:05:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc98b8d7b68084c32805c5f6d050b49a567ea3fc3eef9825909cc3058442e45e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T15:05:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T15:05:13Z is after 2025-08-24T17:21:41Z" Oct 01 15:05:13 crc kubenswrapper[4869]: I1001 15:05:13.325580 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-27gqg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ebbefc55-bef9-4a03-a065-321bff3a75b4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:12Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v9mfd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v9mfd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\
\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v9mfd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v9mfd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v9mfd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v9mfd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art
-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v9mfd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v9mfd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\
\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v9mfd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T15:05:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-27gqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T15:05:13Z is after 2025-08-24T17:21:41Z" Oct 01 15:05:13 crc kubenswrapper[4869]: I1001 15:05:13.350236 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j98s2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"69635c7a-0025-4ea2-a1b6-fc7776c2be11\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a96142295c608f4cce284b8e003ed2a5c2954ce0542b28d04f3f63710c6c65e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T15:05:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-
cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b7rv5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T15:05:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j98s2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T15:05:13Z is after 2025-08-24T17:21:41Z" Oct 01 15:05:13 crc kubenswrapper[4869]: I1001 15:05:13.386889 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-528sf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8edc04b1-dbb4-4d18-a110-e925d19ac049\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbd802c770f8ab2b7dd8fc652bc10e5d67cb9176aa35b4d135ad4d01c6cef158\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T15:05:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hssdd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{
\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T15:05:11Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-528sf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T15:05:13Z is after 2025-08-24T17:21:41Z" Oct 01 15:05:13 crc kubenswrapper[4869]: I1001 15:05:13.427122 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T15:05:13Z is after 2025-08-24T17:21:41Z" Oct 01 15:05:13 crc kubenswrapper[4869]: I1001 15:05:13.469173 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T15:05:13Z is after 2025-08-24T17:21:41Z" Oct 01 15:05:13 crc kubenswrapper[4869]: I1001 15:05:13.510342 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T15:05:13Z is after 2025-08-24T17:21:41Z" Oct 01 15:05:13 crc kubenswrapper[4869]: I1001 15:05:13.546320 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5hbq7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c01e592-2d8d-4773-a8b3-f6efe676f57f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db5afaa678c9d0b0d7cf16284426cfcdd2d3534ab1a4978945d7bdfbfa3a6714\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T15:05:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-65476\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T15:05:12Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5hbq7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T15:05:13Z is after 2025-08-24T17:21:41Z" Oct 01 15:05:13 crc kubenswrapper[4869]: I1001 15:05:13.580895 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 15:05:13 crc kubenswrapper[4869]: I1001 15:05:13.580985 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 15:05:13 crc kubenswrapper[4869]: E1001 15:05:13.581047 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 15:05:13 crc kubenswrapper[4869]: E1001 15:05:13.581429 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 15:05:13 crc kubenswrapper[4869]: I1001 15:05:13.581537 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 15:05:13 crc kubenswrapper[4869]: E1001 15:05:13.581674 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 15:05:13 crc kubenswrapper[4869]: I1001 15:05:13.586581 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Oct 01 15:05:13 crc kubenswrapper[4869]: I1001 15:05:13.587433 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Oct 01 15:05:13 crc kubenswrapper[4869]: I1001 15:05:13.588233 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Oct 01 15:05:13 crc kubenswrapper[4869]: I1001 15:05:13.588932 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Oct 01 15:05:13 crc kubenswrapper[4869]: I1001 15:05:13.589649 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Oct 01 15:05:13 crc kubenswrapper[4869]: I1001 15:05:13.591116 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-ch8sh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dc0f13b-b0ed-4694-97f1-bdaeb0a99d53\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:12Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:12Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sxdtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sxdtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sxdtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-sxdtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sxdtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sxdtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sxdtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T15:05:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-ch8sh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T15:05:13Z is after 2025-08-24T17:21:41Z" Oct 01 15:05:13 crc kubenswrapper[4869]: I1001 15:05:13.591525 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Oct 01 15:05:13 crc kubenswrapper[4869]: I1001 15:05:13.592701 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" 
path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Oct 01 15:05:13 crc kubenswrapper[4869]: I1001 15:05:13.593419 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Oct 01 15:05:13 crc kubenswrapper[4869]: I1001 15:05:13.594167 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Oct 01 15:05:13 crc kubenswrapper[4869]: I1001 15:05:13.594740 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Oct 01 15:05:13 crc kubenswrapper[4869]: I1001 15:05:13.595325 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Oct 01 15:05:13 crc kubenswrapper[4869]: I1001 15:05:13.596064 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Oct 01 15:05:13 crc kubenswrapper[4869]: I1001 15:05:13.596615 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Oct 01 15:05:13 crc kubenswrapper[4869]: I1001 15:05:13.597176 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Oct 01 15:05:13 crc kubenswrapper[4869]: I1001 15:05:13.597773 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Oct 01 15:05:13 crc kubenswrapper[4869]: I1001 15:05:13.598336 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Oct 01 15:05:13 crc kubenswrapper[4869]: I1001 15:05:13.598951 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Oct 01 15:05:13 crc kubenswrapper[4869]: I1001 15:05:13.599420 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Oct 01 15:05:13 crc kubenswrapper[4869]: I1001 15:05:13.599975 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Oct 01 15:05:13 crc kubenswrapper[4869]: I1001 15:05:13.600669 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Oct 01 15:05:13 crc kubenswrapper[4869]: I1001 15:05:13.601252 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" 
path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Oct 01 15:05:13 crc kubenswrapper[4869]: I1001 15:05:13.603144 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Oct 01 15:05:13 crc kubenswrapper[4869]: I1001 15:05:13.603875 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Oct 01 15:05:13 crc kubenswrapper[4869]: I1001 15:05:13.604935 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Oct 01 15:05:13 crc kubenswrapper[4869]: I1001 15:05:13.605629 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Oct 01 15:05:13 crc kubenswrapper[4869]: I1001 15:05:13.606458 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Oct 01 15:05:13 crc kubenswrapper[4869]: I1001 15:05:13.609381 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Oct 01 15:05:13 crc kubenswrapper[4869]: I1001 15:05:13.610107 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Oct 01 15:05:13 crc kubenswrapper[4869]: I1001 15:05:13.611094 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Oct 01 15:05:13 crc kubenswrapper[4869]: I1001 15:05:13.612362 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Oct 01 15:05:13 crc kubenswrapper[4869]: I1001 15:05:13.612925 4869 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Oct 01 15:05:13 crc kubenswrapper[4869]: I1001 15:05:13.613060 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Oct 01 15:05:13 crc kubenswrapper[4869]: I1001 15:05:13.615971 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Oct 01 15:05:13 crc kubenswrapper[4869]: I1001 15:05:13.616830 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Oct 01 15:05:13 crc kubenswrapper[4869]: I1001 15:05:13.617669 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" 
path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Oct 01 15:05:13 crc kubenswrapper[4869]: I1001 15:05:13.620597 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Oct 01 15:05:13 crc kubenswrapper[4869]: I1001 15:05:13.621440 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Oct 01 15:05:13 crc kubenswrapper[4869]: I1001 15:05:13.622079 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Oct 01 15:05:13 crc kubenswrapper[4869]: I1001 15:05:13.625924 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Oct 01 15:05:13 crc kubenswrapper[4869]: I1001 15:05:13.627048 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Oct 01 15:05:13 crc kubenswrapper[4869]: I1001 15:05:13.627819 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Oct 01 15:05:13 crc kubenswrapper[4869]: I1001 15:05:13.628687 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Oct 01 15:05:13 crc kubenswrapper[4869]: I1001 15:05:13.629669 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Oct 01 15:05:13 crc kubenswrapper[4869]: I1001 15:05:13.631671 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b50d32a38b3800ef5fcfd0c75ec15b4cfa750fe67a23ddc7bf72e79b19d00f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T15:05:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T15:05:13Z is after 2025-08-24T17:21:41Z" Oct 01 15:05:13 crc kubenswrapper[4869]: I1001 15:05:13.631975 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Oct 01 15:05:13 crc kubenswrapper[4869]: I1001 15:05:13.632769 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Oct 01 15:05:13 crc kubenswrapper[4869]: I1001 15:05:13.633492 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Oct 01 15:05:13 crc kubenswrapper[4869]: I1001 15:05:13.634396 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Oct 01 15:05:13 crc kubenswrapper[4869]: I1001 15:05:13.636463 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Oct 01 15:05:13 crc kubenswrapper[4869]: I1001 15:05:13.637160 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" 
path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Oct 01 15:05:13 crc kubenswrapper[4869]: I1001 15:05:13.637870 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Oct 01 15:05:13 crc kubenswrapper[4869]: I1001 15:05:13.639210 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Oct 01 15:05:13 crc kubenswrapper[4869]: I1001 15:05:13.639936 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Oct 01 15:05:13 crc kubenswrapper[4869]: I1001 15:05:13.641239 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Oct 01 15:05:13 crc kubenswrapper[4869]: I1001 15:05:13.642053 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Oct 01 15:05:13 crc kubenswrapper[4869]: I1001 15:05:13.671600 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T15:05:13Z is after 2025-08-24T17:21:41Z" Oct 01 15:05:13 crc kubenswrapper[4869]: I1001 15:05:13.715723 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5794379e-0bd9-4b1f-89eb-d1075fd901e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:04:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:04:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:04:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:04:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:04:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ccef45b15da9c0dc92cfbebfda5fddde05397f6a23946a1c7e9df12b3c275bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T15:04:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c5fc6afde533fd1160a9b445d9835126d2c2b0fb019b4b9d13269e91c5ad1e53\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T15:04:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec14a0b8c122b3f70309112b68f07cdd77057e977ceb5f5a061a1892a93e28d6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T15:04:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f5fc3149c69187517c3ad92e33aa91e1d567a78b9770be35c10267467a68d37\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0de9f530ff8a0069f4718fb3b820fef8a17fe5ee04f641326d42503d5b39956a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-01T15:05:10Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1001 15:05:05.118072 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1001 15:05:05.120651 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2773387964/tls.crt::/tmp/serving-cert-2773387964/tls.key\\\\\\\"\\\\nI1001 15:05:10.813323 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1001 15:05:10.819195 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1001 15:05:10.819374 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1001 15:05:10.819467 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1001 15:05:10.819569 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1001 15:05:10.834141 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1001 15:05:10.834193 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1001 15:05:10.834212 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1001 15:05:10.834221 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1001 15:05:10.834227 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1001 15:05:10.834234 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1001 15:05:10.834240 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1001 15:05:10.834323 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1001 15:05:10.839565 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T15:04:54Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T15:05:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cddd04fc8db0c24ce9cefef134d143e9927ec632a6829a402069e740b42a429\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T15:04:54Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1119a5821d140830d19c6272faade87f2ef99f2436d1011adad88b5b86d964b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1119a5821d140830d19c6272faade87f2ef99f2436d1011adad88b5b86d964b0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T15:04:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T15:04:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T15:04:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T15:05:13Z is after 2025-08-24T17:21:41Z" Oct 01 15:05:13 crc kubenswrapper[4869]: I1001 15:05:13.750588 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4b64f7f-0b03-4f47-965b-9fde048b735c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m289h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m289h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T15:05:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-c86m8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T15:05:13Z is after 2025-08-24T17:21:41Z" Oct 01 15:05:13 crc kubenswrapper[4869]: I1001 15:05:13.758535 4869 generic.go:334] "Generic (PLEG): container finished" podID="0dc0f13b-b0ed-4694-97f1-bdaeb0a99d53" containerID="7c94b2af75607c37d477ef0d3bc8c9f6f775c7a3fe46c1c18c0df11a56a61f85" exitCode=0 Oct 01 15:05:13 crc kubenswrapper[4869]: 
I1001 15:05:13.758610 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-ch8sh" event={"ID":"0dc0f13b-b0ed-4694-97f1-bdaeb0a99d53","Type":"ContainerDied","Data":"7c94b2af75607c37d477ef0d3bc8c9f6f775c7a3fe46c1c18c0df11a56a61f85"} Oct 01 15:05:13 crc kubenswrapper[4869]: I1001 15:05:13.758647 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-ch8sh" event={"ID":"0dc0f13b-b0ed-4694-97f1-bdaeb0a99d53","Type":"ContainerStarted","Data":"453655c0de4f7776d0ea7e6dc5fb2a6f4a56b0f797a48a77317c1af7f487e4ad"} Oct 01 15:05:13 crc kubenswrapper[4869]: I1001 15:05:13.760896 4869 generic.go:334] "Generic (PLEG): container finished" podID="ebbefc55-bef9-4a03-a065-321bff3a75b4" containerID="992afa5f68050b8a1ec99aaf070b3016494713d2d0d51fef7bd1296ad29546fd" exitCode=0 Oct 01 15:05:13 crc kubenswrapper[4869]: I1001 15:05:13.760978 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-27gqg" event={"ID":"ebbefc55-bef9-4a03-a065-321bff3a75b4","Type":"ContainerDied","Data":"992afa5f68050b8a1ec99aaf070b3016494713d2d0d51fef7bd1296ad29546fd"} Oct 01 15:05:13 crc kubenswrapper[4869]: I1001 15:05:13.761012 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-27gqg" event={"ID":"ebbefc55-bef9-4a03-a065-321bff3a75b4","Type":"ContainerStarted","Data":"494cc5f315d900d7ca2031138d77f2eac5461180d1368ed046cc0e0509e13a78"} Oct 01 15:05:13 crc kubenswrapper[4869]: I1001 15:05:13.764824 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" event={"ID":"a4b64f7f-0b03-4f47-965b-9fde048b735c","Type":"ContainerStarted","Data":"5159ec584ee7be0d58bb587c000f9e8c177f29a0a38d6732801a7a1d530197cc"} Oct 01 15:05:13 crc kubenswrapper[4869]: I1001 15:05:13.764875 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" event={"ID":"a4b64f7f-0b03-4f47-965b-9fde048b735c","Type":"ContainerStarted","Data":"25d5b00d18bcf2d6a0845f99197fc189846fe0b873d6025b6799bfcb762624fd"} Oct 01 15:05:13 crc kubenswrapper[4869]: I1001 15:05:13.764917 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" event={"ID":"a4b64f7f-0b03-4f47-965b-9fde048b735c","Type":"ContainerStarted","Data":"978d7f4f27536bda84814cacb5123965fd3951ce083f4e55c50e634b8187f2cd"} Oct 01 15:05:13 crc kubenswrapper[4869]: I1001 15:05:13.791476 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-528sf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8edc04b1-dbb4-4d18-a110-e925d19ac049\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbd802c770f8ab2b7dd8fc652bc10e5d67cb9176aa35b4d135ad4d01c6cef158\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T15:05:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hssdd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T15:05:11Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-528sf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T15:05:13Z is after 2025-08-24T17:21:41Z" Oct 01 15:05:13 crc kubenswrapper[4869]: I1001 15:05:13.829397 4869 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T15:05:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T15:05:13Z is after 2025-08-24T17:21:41Z" Oct 01 15:05:13 crc kubenswrapper[4869]: I1001 15:05:13.914303 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-j98s2" podStartSLOduration=2.914247049 podStartE2EDuration="2.914247049s" podCreationTimestamp="2025-10-01 15:05:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:05:13.885904192 +0000 UTC m=+23.032747308" watchObservedRunningTime="2025-10-01 15:05:13.914247049 +0000 UTC m=+23.061090175" Oct 01 15:05:14 crc kubenswrapper[4869]: I1001 15:05:14.122769 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-5hbq7" podStartSLOduration=3.122749323 podStartE2EDuration="3.122749323s" podCreationTimestamp="2025-10-01 15:05:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:05:14.085164909 +0000 UTC m=+23.232008025" watchObservedRunningTime="2025-10-01 15:05:14.122749323 +0000 UTC m=+23.269592439" Oct 01 15:05:14 crc kubenswrapper[4869]: I1001 15:05:14.163143 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=3.163111012 podStartE2EDuration="3.163111012s" podCreationTimestamp="2025-10-01 15:05:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:05:14.162926468 +0000 UTC m=+23.309769584" watchObservedRunningTime="2025-10-01 15:05:14.163111012 +0000 UTC m=+23.309954128" Oct 01 15:05:14 crc kubenswrapper[4869]: I1001 15:05:14.270476 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=3.2704597619999998 podStartE2EDuration="3.270459762s" podCreationTimestamp="2025-10-01 15:05:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 
00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:05:14.230422492 +0000 UTC m=+23.377265618" watchObservedRunningTime="2025-10-01 15:05:14.270459762 +0000 UTC m=+23.417302878" Oct 01 15:05:14 crc kubenswrapper[4869]: I1001 15:05:14.392190 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podStartSLOduration=3.392165886 podStartE2EDuration="3.392165886s" podCreationTimestamp="2025-10-01 15:05:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:05:14.349634089 +0000 UTC m=+23.496477215" watchObservedRunningTime="2025-10-01 15:05:14.392165886 +0000 UTC m=+23.539009002" Oct 01 15:05:14 crc kubenswrapper[4869]: I1001 15:05:14.392588 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z5fgn"] Oct 01 15:05:14 crc kubenswrapper[4869]: I1001 15:05:14.393133 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z5fgn" Oct 01 15:05:14 crc kubenswrapper[4869]: I1001 15:05:14.400223 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Oct 01 15:05:14 crc kubenswrapper[4869]: I1001 15:05:14.421050 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Oct 01 15:05:14 crc kubenswrapper[4869]: I1001 15:05:14.429146 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-rz7qp"] Oct 01 15:05:14 crc kubenswrapper[4869]: I1001 15:05:14.429717 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rz7qp" Oct 01 15:05:14 crc kubenswrapper[4869]: E1001 15:05:14.429793 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-rz7qp" podUID="328ce213-12ef-40af-a41f-e0079949b82d" Oct 01 15:05:14 crc kubenswrapper[4869]: I1001 15:05:14.466836 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/328ce213-12ef-40af-a41f-e0079949b82d-metrics-certs\") pod \"network-metrics-daemon-rz7qp\" (UID: \"328ce213-12ef-40af-a41f-e0079949b82d\") " pod="openshift-multus/network-metrics-daemon-rz7qp" Oct 01 15:05:14 crc kubenswrapper[4869]: I1001 15:05:14.466869 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/88d8a030-d15b-4e79-b3a1-4412aef234f6-env-overrides\") pod \"ovnkube-control-plane-749d76644c-z5fgn\" (UID: \"88d8a030-d15b-4e79-b3a1-4412aef234f6\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z5fgn" Oct 01 15:05:14 crc kubenswrapper[4869]: I1001 15:05:14.466910 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lmcbm\" (UniqueName: \"kubernetes.io/projected/328ce213-12ef-40af-a41f-e0079949b82d-kube-api-access-lmcbm\") pod \"network-metrics-daemon-rz7qp\" (UID: \"328ce213-12ef-40af-a41f-e0079949b82d\") " pod="openshift-multus/network-metrics-daemon-rz7qp" Oct 01 15:05:14 crc kubenswrapper[4869]: I1001 15:05:14.466937 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/88d8a030-d15b-4e79-b3a1-4412aef234f6-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-z5fgn\" (UID: \"88d8a030-d15b-4e79-b3a1-4412aef234f6\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z5fgn" Oct 01 15:05:14 crc kubenswrapper[4869]: I1001 15:05:14.466956 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kggh2\" (UniqueName: \"kubernetes.io/projected/88d8a030-d15b-4e79-b3a1-4412aef234f6-kube-api-access-kggh2\") pod \"ovnkube-control-plane-749d76644c-z5fgn\" (UID: \"88d8a030-d15b-4e79-b3a1-4412aef234f6\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z5fgn" Oct 01 15:05:14 crc kubenswrapper[4869]: I1001 15:05:14.466979 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/88d8a030-d15b-4e79-b3a1-4412aef234f6-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-z5fgn\" (UID: \"88d8a030-d15b-4e79-b3a1-4412aef234f6\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z5fgn" Oct 01 15:05:14 crc kubenswrapper[4869]: I1001 15:05:14.470293 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-528sf" podStartSLOduration=3.470241062 podStartE2EDuration="3.470241062s" podCreationTimestamp="2025-10-01 15:05:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:05:14.467134539 +0000 UTC m=+23.613977655" watchObservedRunningTime="2025-10-01 15:05:14.470241062 +0000 UTC m=+23.617084188" Oct 01 15:05:14 crc kubenswrapper[4869]: I1001 15:05:14.568728 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lmcbm\" (UniqueName: 
\"kubernetes.io/projected/328ce213-12ef-40af-a41f-e0079949b82d-kube-api-access-lmcbm\") pod \"network-metrics-daemon-rz7qp\" (UID: \"328ce213-12ef-40af-a41f-e0079949b82d\") " pod="openshift-multus/network-metrics-daemon-rz7qp" Oct 01 15:05:14 crc kubenswrapper[4869]: I1001 15:05:14.568832 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/88d8a030-d15b-4e79-b3a1-4412aef234f6-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-z5fgn\" (UID: \"88d8a030-d15b-4e79-b3a1-4412aef234f6\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z5fgn" Oct 01 15:05:14 crc kubenswrapper[4869]: I1001 15:05:14.568864 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kggh2\" (UniqueName: \"kubernetes.io/projected/88d8a030-d15b-4e79-b3a1-4412aef234f6-kube-api-access-kggh2\") pod \"ovnkube-control-plane-749d76644c-z5fgn\" (UID: \"88d8a030-d15b-4e79-b3a1-4412aef234f6\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z5fgn" Oct 01 15:05:14 crc kubenswrapper[4869]: I1001 15:05:14.568908 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/88d8a030-d15b-4e79-b3a1-4412aef234f6-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-z5fgn\" (UID: \"88d8a030-d15b-4e79-b3a1-4412aef234f6\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z5fgn" Oct 01 15:05:14 crc kubenswrapper[4869]: I1001 15:05:14.568954 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/328ce213-12ef-40af-a41f-e0079949b82d-metrics-certs\") pod \"network-metrics-daemon-rz7qp\" (UID: \"328ce213-12ef-40af-a41f-e0079949b82d\") " pod="openshift-multus/network-metrics-daemon-rz7qp" Oct 01 15:05:14 crc kubenswrapper[4869]: I1001 15:05:14.568975 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/88d8a030-d15b-4e79-b3a1-4412aef234f6-env-overrides\") pod \"ovnkube-control-plane-749d76644c-z5fgn\" (UID: \"88d8a030-d15b-4e79-b3a1-4412aef234f6\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z5fgn" Oct 01 15:05:14 crc kubenswrapper[4869]: E1001 15:05:14.569121 4869 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 01 15:05:14 crc kubenswrapper[4869]: E1001 15:05:14.569410 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/328ce213-12ef-40af-a41f-e0079949b82d-metrics-certs podName:328ce213-12ef-40af-a41f-e0079949b82d nodeName:}" failed. No retries permitted until 2025-10-01 15:05:15.069383803 +0000 UTC m=+24.216226919 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/328ce213-12ef-40af-a41f-e0079949b82d-metrics-certs") pod "network-metrics-daemon-rz7qp" (UID: "328ce213-12ef-40af-a41f-e0079949b82d") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 01 15:05:14 crc kubenswrapper[4869]: I1001 15:05:14.570073 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/88d8a030-d15b-4e79-b3a1-4412aef234f6-env-overrides\") pod \"ovnkube-control-plane-749d76644c-z5fgn\" (UID: \"88d8a030-d15b-4e79-b3a1-4412aef234f6\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z5fgn" Oct 01 15:05:14 crc kubenswrapper[4869]: I1001 15:05:14.575433 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/88d8a030-d15b-4e79-b3a1-4412aef234f6-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-z5fgn\" (UID: \"88d8a030-d15b-4e79-b3a1-4412aef234f6\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z5fgn" Oct 01 15:05:14 crc kubenswrapper[4869]: I1001 15:05:14.582181 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/88d8a030-d15b-4e79-b3a1-4412aef234f6-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-z5fgn\" (UID: \"88d8a030-d15b-4e79-b3a1-4412aef234f6\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z5fgn" Oct 01 15:05:14 crc kubenswrapper[4869]: I1001 15:05:14.634326 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lmcbm\" (UniqueName: \"kubernetes.io/projected/328ce213-12ef-40af-a41f-e0079949b82d-kube-api-access-lmcbm\") pod \"network-metrics-daemon-rz7qp\" (UID: \"328ce213-12ef-40af-a41f-e0079949b82d\") " pod="openshift-multus/network-metrics-daemon-rz7qp" Oct 01 15:05:14 crc kubenswrapper[4869]: I1001 15:05:14.643040 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kggh2\" (UniqueName: \"kubernetes.io/projected/88d8a030-d15b-4e79-b3a1-4412aef234f6-kube-api-access-kggh2\") pod \"ovnkube-control-plane-749d76644c-z5fgn\" (UID: \"88d8a030-d15b-4e79-b3a1-4412aef234f6\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z5fgn" Oct 01 15:05:14 crc kubenswrapper[4869]: I1001 15:05:14.762134 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z5fgn" Oct 01 15:05:14 crc kubenswrapper[4869]: I1001 15:05:14.771388 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-ch8sh" event={"ID":"0dc0f13b-b0ed-4694-97f1-bdaeb0a99d53","Type":"ContainerStarted","Data":"82201543c1454f980b6aed8623ff29c9e22878dc88e56a9125238f21d44abcf2"} Oct 01 15:05:14 crc kubenswrapper[4869]: I1001 15:05:14.772759 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"f9ab1660a759e976f31078c7d2313f3a62d5ac781ac875926a0e85198fb314df"} Oct 01 15:05:14 crc kubenswrapper[4869]: I1001 15:05:14.776529 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-27gqg" event={"ID":"ebbefc55-bef9-4a03-a065-321bff3a75b4","Type":"ContainerStarted","Data":"e332dd7b45911e52db6ad9d474be312012028a9def590871b4d2a7a735c80ff0"} Oct 01 15:05:14 crc kubenswrapper[4869]: I1001 15:05:14.776589 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-27gqg" event={"ID":"ebbefc55-bef9-4a03-a065-321bff3a75b4","Type":"ContainerStarted","Data":"c59f9083cd75c8bee85596c2a178cb931de40a37dd459433dd2c23d5b61d95f0"} Oct 01 15:05:14 crc kubenswrapper[4869]: I1001 15:05:14.776601 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-27gqg" event={"ID":"ebbefc55-bef9-4a03-a065-321bff3a75b4","Type":"ContainerStarted","Data":"c5f583b0b94050424796b550a344f0c6069c1608629fdd01ddfdb303add0d5f7"} Oct 01 15:05:14 crc kubenswrapper[4869]: I1001 15:05:14.776612 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-27gqg" event={"ID":"ebbefc55-bef9-4a03-a065-321bff3a75b4","Type":"ContainerStarted","Data":"ce48fdbad0049809bac4c2afd94d87133b935841c6a3805f8eacc26dd8527c62"} Oct 01 15:05:14 crc kubenswrapper[4869]: W1001 15:05:14.787526 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod88d8a030_d15b_4e79_b3a1_4412aef234f6.slice/crio-76ff1cba73cef2f452554920b7e4fc5083c04286b0dd1775ac30e59daed907d2 WatchSource:0}: Error finding container 76ff1cba73cef2f452554920b7e4fc5083c04286b0dd1775ac30e59daed907d2: Status 404 returned error can't find the container with id 76ff1cba73cef2f452554920b7e4fc5083c04286b0dd1775ac30e59daed907d2 Oct 01 15:05:15 crc kubenswrapper[4869]: I1001 15:05:15.074212 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/328ce213-12ef-40af-a41f-e0079949b82d-metrics-certs\") pod \"network-metrics-daemon-rz7qp\" (UID: \"328ce213-12ef-40af-a41f-e0079949b82d\") " pod="openshift-multus/network-metrics-daemon-rz7qp" Oct 01 15:05:15 crc kubenswrapper[4869]: E1001 15:05:15.074383 4869 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 01 15:05:15 crc kubenswrapper[4869]: E1001 15:05:15.074454 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/328ce213-12ef-40af-a41f-e0079949b82d-metrics-certs podName:328ce213-12ef-40af-a41f-e0079949b82d nodeName:}" failed. No retries permitted until 2025-10-01 15:05:16.074436885 +0000 UTC m=+25.221280001 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/328ce213-12ef-40af-a41f-e0079949b82d-metrics-certs") pod "network-metrics-daemon-rz7qp" (UID: "328ce213-12ef-40af-a41f-e0079949b82d") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 01 15:05:15 crc kubenswrapper[4869]: I1001 15:05:15.175196 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 15:05:15 crc kubenswrapper[4869]: I1001 15:05:15.175302 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 15:05:15 crc kubenswrapper[4869]: I1001 15:05:15.175329 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 15:05:15 crc kubenswrapper[4869]: E1001 15:05:15.175445 4869 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 01 15:05:15 crc kubenswrapper[4869]: E1001 15:05:15.175501 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-01 15:05:19.175476856 +0000 UTC m=+28.322319972 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 01 15:05:15 crc kubenswrapper[4869]: E1001 15:05:15.175773 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 15:05:19.175762544 +0000 UTC m=+28.322605660 (durationBeforeRetry 4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:15 crc kubenswrapper[4869]: E1001 15:05:15.175807 4869 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 01 15:05:15 crc kubenswrapper[4869]: E1001 15:05:15.175829 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-01 15:05:19.175823035 +0000 UTC m=+28.322666151 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 01 15:05:15 crc kubenswrapper[4869]: I1001 15:05:15.276545 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 15:05:15 crc kubenswrapper[4869]: I1001 15:05:15.276588 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 15:05:15 crc kubenswrapper[4869]: E1001 15:05:15.276749 4869 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 01 15:05:15 crc kubenswrapper[4869]: E1001 15:05:15.276766 4869 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 01 15:05:15 crc kubenswrapper[4869]: E1001 15:05:15.276778 4869 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 01 15:05:15 crc kubenswrapper[4869]: E1001 15:05:15.276828 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-01 15:05:19.276814055 +0000 UTC m=+28.423657171 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 01 15:05:15 crc kubenswrapper[4869]: E1001 15:05:15.276909 4869 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 01 15:05:15 crc kubenswrapper[4869]: E1001 15:05:15.276970 4869 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 01 15:05:15 crc kubenswrapper[4869]: E1001 15:05:15.276990 4869 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 01 15:05:15 crc kubenswrapper[4869]: E1001 15:05:15.277068 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-01 15:05:19.277043041 +0000 UTC m=+28.423886157 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 01 15:05:15 crc kubenswrapper[4869]: I1001 15:05:15.580046 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 15:05:15 crc kubenswrapper[4869]: I1001 15:05:15.580138 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 15:05:15 crc kubenswrapper[4869]: I1001 15:05:15.580064 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rz7qp" Oct 01 15:05:15 crc kubenswrapper[4869]: E1001 15:05:15.580242 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 15:05:15 crc kubenswrapper[4869]: I1001 15:05:15.580046 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 15:05:15 crc kubenswrapper[4869]: E1001 15:05:15.580479 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 15:05:15 crc kubenswrapper[4869]: E1001 15:05:15.580579 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 15:05:15 crc kubenswrapper[4869]: E1001 15:05:15.580671 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rz7qp" podUID="328ce213-12ef-40af-a41f-e0079949b82d" Oct 01 15:05:15 crc kubenswrapper[4869]: I1001 15:05:15.782325 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z5fgn" event={"ID":"88d8a030-d15b-4e79-b3a1-4412aef234f6","Type":"ContainerStarted","Data":"1ebc6cd1174524eb4e05b1a3e85a27a2c2cd7676f140ca20fbfae834f809a52a"} Oct 01 15:05:15 crc kubenswrapper[4869]: I1001 15:05:15.782372 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z5fgn" event={"ID":"88d8a030-d15b-4e79-b3a1-4412aef234f6","Type":"ContainerStarted","Data":"7c498d9a90752fdba951c70541aaed8d15f5f57b92608b1ca966ca5fad070a63"} Oct 01 15:05:15 crc kubenswrapper[4869]: I1001 15:05:15.782385 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z5fgn" event={"ID":"88d8a030-d15b-4e79-b3a1-4412aef234f6","Type":"ContainerStarted","Data":"76ff1cba73cef2f452554920b7e4fc5083c04286b0dd1775ac30e59daed907d2"} Oct 01 15:05:15 crc kubenswrapper[4869]: I1001 15:05:15.784429 4869 generic.go:334] "Generic (PLEG): container finished" podID="0dc0f13b-b0ed-4694-97f1-bdaeb0a99d53" containerID="82201543c1454f980b6aed8623ff29c9e22878dc88e56a9125238f21d44abcf2" exitCode=0 Oct 01 15:05:15 crc kubenswrapper[4869]: I1001 15:05:15.784510 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-ch8sh" event={"ID":"0dc0f13b-b0ed-4694-97f1-bdaeb0a99d53","Type":"ContainerDied","Data":"82201543c1454f980b6aed8623ff29c9e22878dc88e56a9125238f21d44abcf2"} Oct 01 15:05:15 crc kubenswrapper[4869]: I1001 15:05:15.789828 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-27gqg" event={"ID":"ebbefc55-bef9-4a03-a065-321bff3a75b4","Type":"ContainerStarted","Data":"7afd925f4b10ef56213c985a7b92c49d60a62f9be8ade831c42641100327ea40"} Oct 01 15:05:15 crc kubenswrapper[4869]: I1001 15:05:15.789880 4869 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="openshift-ovn-kubernetes/ovnkube-node-27gqg" event={"ID":"ebbefc55-bef9-4a03-a065-321bff3a75b4","Type":"ContainerStarted","Data":"7811ddaf0b31f53a6bc23e82dfa3a4bb5f9fa408b1f2e22dff3f71e9de8b00d2"} Oct 01 15:05:15 crc kubenswrapper[4869]: I1001 15:05:15.798355 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z5fgn" podStartSLOduration=3.798324277 podStartE2EDuration="3.798324277s" podCreationTimestamp="2025-10-01 15:05:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:05:15.797386852 +0000 UTC m=+24.944229998" watchObservedRunningTime="2025-10-01 15:05:15.798324277 +0000 UTC m=+24.945167443" Oct 01 15:05:16 crc kubenswrapper[4869]: I1001 15:05:16.084177 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/328ce213-12ef-40af-a41f-e0079949b82d-metrics-certs\") pod \"network-metrics-daemon-rz7qp\" (UID: \"328ce213-12ef-40af-a41f-e0079949b82d\") " pod="openshift-multus/network-metrics-daemon-rz7qp" Oct 01 15:05:16 crc kubenswrapper[4869]: E1001 15:05:16.084485 4869 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 01 15:05:16 crc kubenswrapper[4869]: E1001 15:05:16.084664 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/328ce213-12ef-40af-a41f-e0079949b82d-metrics-certs podName:328ce213-12ef-40af-a41f-e0079949b82d nodeName:}" failed. No retries permitted until 2025-10-01 15:05:18.084644331 +0000 UTC m=+27.231487447 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/328ce213-12ef-40af-a41f-e0079949b82d-metrics-certs") pod "network-metrics-daemon-rz7qp" (UID: "328ce213-12ef-40af-a41f-e0079949b82d") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 01 15:05:16 crc kubenswrapper[4869]: I1001 15:05:16.795815 4869 generic.go:334] "Generic (PLEG): container finished" podID="0dc0f13b-b0ed-4694-97f1-bdaeb0a99d53" containerID="e5cac3efd98ed4132adaa21af86a79a27db8c6885e9a6a643261723e668bd06b" exitCode=0 Oct 01 15:05:16 crc kubenswrapper[4869]: I1001 15:05:16.795867 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-ch8sh" event={"ID":"0dc0f13b-b0ed-4694-97f1-bdaeb0a99d53","Type":"ContainerDied","Data":"e5cac3efd98ed4132adaa21af86a79a27db8c6885e9a6a643261723e668bd06b"} Oct 01 15:05:17 crc kubenswrapper[4869]: I1001 15:05:17.177593 4869 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 15:05:17 crc kubenswrapper[4869]: I1001 15:05:17.180204 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 15:05:17 crc kubenswrapper[4869]: I1001 15:05:17.180254 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 15:05:17 crc kubenswrapper[4869]: I1001 15:05:17.180293 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 15:05:17 crc kubenswrapper[4869]: I1001 15:05:17.180473 4869 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 01 15:05:17 crc kubenswrapper[4869]: I1001 15:05:17.186620 4869 
kubelet_node_status.go:115] "Node was previously registered" node="crc" Oct 01 15:05:17 crc kubenswrapper[4869]: I1001 15:05:17.186918 4869 kubelet_node_status.go:79] "Successfully registered node" node="crc" Oct 01 15:05:17 crc kubenswrapper[4869]: I1001 15:05:17.187902 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 15:05:17 crc kubenswrapper[4869]: I1001 15:05:17.187931 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 15:05:17 crc kubenswrapper[4869]: I1001 15:05:17.187941 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 15:05:17 crc kubenswrapper[4869]: I1001 15:05:17.187955 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 15:05:17 crc kubenswrapper[4869]: I1001 15:05:17.187967 4869 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T15:05:17Z","lastTransitionTime":"2025-10-01T15:05:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 15:05:17 crc kubenswrapper[4869]: I1001 15:05:17.237303 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-nq8gd"] Oct 01 15:05:17 crc kubenswrapper[4869]: I1001 15:05:17.238030 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-nq8gd" Oct 01 15:05:17 crc kubenswrapper[4869]: I1001 15:05:17.239568 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Oct 01 15:05:17 crc kubenswrapper[4869]: I1001 15:05:17.239695 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Oct 01 15:05:17 crc kubenswrapper[4869]: I1001 15:05:17.240017 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Oct 01 15:05:17 crc kubenswrapper[4869]: I1001 15:05:17.242021 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Oct 01 15:05:17 crc kubenswrapper[4869]: I1001 15:05:17.298540 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/e03f40f3-9b1c-4415-a987-c4ecb012c52c-service-ca\") pod \"cluster-version-operator-5c965bbfc6-nq8gd\" (UID: \"e03f40f3-9b1c-4415-a987-c4ecb012c52c\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-nq8gd" Oct 01 15:05:17 crc kubenswrapper[4869]: I1001 15:05:17.298599 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e03f40f3-9b1c-4415-a987-c4ecb012c52c-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-nq8gd\" (UID: \"e03f40f3-9b1c-4415-a987-c4ecb012c52c\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-nq8gd" Oct 01 15:05:17 crc kubenswrapper[4869]: I1001 15:05:17.298724 4869 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e03f40f3-9b1c-4415-a987-c4ecb012c52c-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-nq8gd\" (UID: \"e03f40f3-9b1c-4415-a987-c4ecb012c52c\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-nq8gd" Oct 01 15:05:17 crc kubenswrapper[4869]: I1001 15:05:17.298832 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/e03f40f3-9b1c-4415-a987-c4ecb012c52c-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-nq8gd\" (UID: \"e03f40f3-9b1c-4415-a987-c4ecb012c52c\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-nq8gd" Oct 01 15:05:17 crc kubenswrapper[4869]: I1001 15:05:17.298990 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/e03f40f3-9b1c-4415-a987-c4ecb012c52c-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-nq8gd\" (UID: \"e03f40f3-9b1c-4415-a987-c4ecb012c52c\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-nq8gd" Oct 01 15:05:17 crc kubenswrapper[4869]: I1001 15:05:17.399899 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e03f40f3-9b1c-4415-a987-c4ecb012c52c-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-nq8gd\" (UID: \"e03f40f3-9b1c-4415-a987-c4ecb012c52c\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-nq8gd" Oct 01 15:05:17 crc kubenswrapper[4869]: I1001 15:05:17.399959 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e03f40f3-9b1c-4415-a987-c4ecb012c52c-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-nq8gd\" (UID: \"e03f40f3-9b1c-4415-a987-c4ecb012c52c\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-nq8gd" Oct 01 15:05:17 crc kubenswrapper[4869]: I1001 15:05:17.400016 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/e03f40f3-9b1c-4415-a987-c4ecb012c52c-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-nq8gd\" (UID: \"e03f40f3-9b1c-4415-a987-c4ecb012c52c\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-nq8gd" Oct 01 15:05:17 crc kubenswrapper[4869]: I1001 15:05:17.400054 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/e03f40f3-9b1c-4415-a987-c4ecb012c52c-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-nq8gd\" (UID: \"e03f40f3-9b1c-4415-a987-c4ecb012c52c\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-nq8gd" Oct 01 15:05:17 crc kubenswrapper[4869]: I1001 15:05:17.400100 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/e03f40f3-9b1c-4415-a987-c4ecb012c52c-service-ca\") pod \"cluster-version-operator-5c965bbfc6-nq8gd\" (UID: \"e03f40f3-9b1c-4415-a987-c4ecb012c52c\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-nq8gd" Oct 01 15:05:17 crc kubenswrapper[4869]: I1001 15:05:17.400200 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/e03f40f3-9b1c-4415-a987-c4ecb012c52c-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-nq8gd\" (UID: \"e03f40f3-9b1c-4415-a987-c4ecb012c52c\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-nq8gd" Oct 01 15:05:17 crc kubenswrapper[4869]: I1001 15:05:17.400201 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/e03f40f3-9b1c-4415-a987-c4ecb012c52c-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-nq8gd\" (UID: \"e03f40f3-9b1c-4415-a987-c4ecb012c52c\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-nq8gd" Oct 01 15:05:17 crc kubenswrapper[4869]: I1001 15:05:17.401624 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/e03f40f3-9b1c-4415-a987-c4ecb012c52c-service-ca\") pod \"cluster-version-operator-5c965bbfc6-nq8gd\" (UID: \"e03f40f3-9b1c-4415-a987-c4ecb012c52c\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-nq8gd" Oct 01 15:05:17 crc kubenswrapper[4869]: I1001 15:05:17.406886 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e03f40f3-9b1c-4415-a987-c4ecb012c52c-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-nq8gd\" (UID: \"e03f40f3-9b1c-4415-a987-c4ecb012c52c\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-nq8gd" Oct 01 15:05:17 crc kubenswrapper[4869]: I1001 15:05:17.424515 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e03f40f3-9b1c-4415-a987-c4ecb012c52c-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-nq8gd\" (UID: \"e03f40f3-9b1c-4415-a987-c4ecb012c52c\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-nq8gd" Oct 01 15:05:17 crc kubenswrapper[4869]: I1001 15:05:17.552509 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-nq8gd" Oct 01 15:05:17 crc kubenswrapper[4869]: W1001 15:05:17.568956 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode03f40f3_9b1c_4415_a987_c4ecb012c52c.slice/crio-5c4e6798265902a460e4b065c7a02c5fcadbab3e5a8e72bfd8d45ca40d481c9a WatchSource:0}: Error finding container 5c4e6798265902a460e4b065c7a02c5fcadbab3e5a8e72bfd8d45ca40d481c9a: Status 404 returned error can't find the container with id 5c4e6798265902a460e4b065c7a02c5fcadbab3e5a8e72bfd8d45ca40d481c9a Oct 01 15:05:17 crc kubenswrapper[4869]: I1001 15:05:17.580983 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 15:05:17 crc kubenswrapper[4869]: I1001 15:05:17.581021 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rz7qp" Oct 01 15:05:17 crc kubenswrapper[4869]: I1001 15:05:17.581076 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 15:05:17 crc kubenswrapper[4869]: E1001 15:05:17.581150 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 15:05:17 crc kubenswrapper[4869]: I1001 15:05:17.581327 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 15:05:17 crc kubenswrapper[4869]: E1001 15:05:17.581410 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 15:05:17 crc kubenswrapper[4869]: E1001 15:05:17.581482 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 15:05:17 crc kubenswrapper[4869]: E1001 15:05:17.581623 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-rz7qp" podUID="328ce213-12ef-40af-a41f-e0079949b82d" Oct 01 15:05:17 crc kubenswrapper[4869]: I1001 15:05:17.803305 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-27gqg" event={"ID":"ebbefc55-bef9-4a03-a065-321bff3a75b4","Type":"ContainerStarted","Data":"59471b38e240e2b9a2ccfc1dee734c077c4f09fa10e0b55964d0fe73cd8af5a1"} Oct 01 15:05:17 crc kubenswrapper[4869]: I1001 15:05:17.806194 4869 generic.go:334] "Generic (PLEG): container finished" podID="0dc0f13b-b0ed-4694-97f1-bdaeb0a99d53" containerID="ea02df6900ebf526226d975525f06eaf45bbc34d16929c625c393a809c070690" exitCode=0 Oct 01 15:05:17 crc kubenswrapper[4869]: I1001 15:05:17.806268 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-ch8sh" event={"ID":"0dc0f13b-b0ed-4694-97f1-bdaeb0a99d53","Type":"ContainerDied","Data":"ea02df6900ebf526226d975525f06eaf45bbc34d16929c625c393a809c070690"} Oct 01 15:05:17 crc kubenswrapper[4869]: I1001 15:05:17.807818 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-nq8gd" event={"ID":"e03f40f3-9b1c-4415-a987-c4ecb012c52c","Type":"ContainerStarted","Data":"638851840c77f93c28fd38eac8e7225473e3a02679f9a2ded712c47b259a7145"} Oct 01 15:05:17 crc kubenswrapper[4869]: I1001 15:05:17.807847 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-nq8gd" event={"ID":"e03f40f3-9b1c-4415-a987-c4ecb012c52c","Type":"ContainerStarted","Data":"5c4e6798265902a460e4b065c7a02c5fcadbab3e5a8e72bfd8d45ca40d481c9a"} Oct 01 15:05:18 crc kubenswrapper[4869]: I1001 15:05:18.107130 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/328ce213-12ef-40af-a41f-e0079949b82d-metrics-certs\") pod \"network-metrics-daemon-rz7qp\" (UID: \"328ce213-12ef-40af-a41f-e0079949b82d\") " pod="openshift-multus/network-metrics-daemon-rz7qp" Oct 01 15:05:18 crc kubenswrapper[4869]: E1001 15:05:18.107327 4869 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 01 15:05:18 crc kubenswrapper[4869]: E1001 15:05:18.107431 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/328ce213-12ef-40af-a41f-e0079949b82d-metrics-certs podName:328ce213-12ef-40af-a41f-e0079949b82d nodeName:}" failed. No retries permitted until 2025-10-01 15:05:22.107405287 +0000 UTC m=+31.254248443 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/328ce213-12ef-40af-a41f-e0079949b82d-metrics-certs") pod "network-metrics-daemon-rz7qp" (UID: "328ce213-12ef-40af-a41f-e0079949b82d") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 01 15:05:18 crc kubenswrapper[4869]: I1001 15:05:18.814684 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-ch8sh" event={"ID":"0dc0f13b-b0ed-4694-97f1-bdaeb0a99d53","Type":"ContainerStarted","Data":"20b624364636869761945ed8ed93eafb77ece8a3292a1ad273d59ae49a71493e"} Oct 01 15:05:18 crc kubenswrapper[4869]: I1001 15:05:18.844473 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-nq8gd" podStartSLOduration=7.844450381 podStartE2EDuration="7.844450381s" podCreationTimestamp="2025-10-01 15:05:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:05:17.864283228 +0000 UTC m=+27.011126354" watchObservedRunningTime="2025-10-01 15:05:18.844450381 +0000 UTC m=+27.991293507" Oct 01 15:05:19 crc kubenswrapper[4869]: I1001 15:05:19.219390 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 15:05:19 crc kubenswrapper[4869]: I1001 15:05:19.219585 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 15:05:19 crc kubenswrapper[4869]: E1001 15:05:19.219674 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 15:05:27.219635561 +0000 UTC m=+36.366478707 (durationBeforeRetry 8s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:19 crc kubenswrapper[4869]: E1001 15:05:19.219742 4869 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 01 15:05:19 crc kubenswrapper[4869]: I1001 15:05:19.219745 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 15:05:19 crc kubenswrapper[4869]: E1001 15:05:19.219855 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-01 15:05:27.219822236 +0000 UTC m=+36.366665472 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 01 15:05:19 crc kubenswrapper[4869]: E1001 15:05:19.219877 4869 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 01 15:05:19 crc kubenswrapper[4869]: E1001 15:05:19.220061 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-01 15:05:27.220045442 +0000 UTC m=+36.366888598 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 01 15:05:19 crc kubenswrapper[4869]: I1001 15:05:19.322781 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 15:05:19 crc kubenswrapper[4869]: I1001 15:05:19.322912 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 15:05:19 crc kubenswrapper[4869]: E1001 15:05:19.322990 4869 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 01 15:05:19 crc kubenswrapper[4869]: E1001 15:05:19.323016 4869 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 01 15:05:19 crc kubenswrapper[4869]: E1001 15:05:19.323027 4869 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 01 15:05:19 crc kubenswrapper[4869]: E1001 15:05:19.323069 4869 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 01 15:05:19 crc kubenswrapper[4869]: E1001 15:05:19.323098 4869 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 01 15:05:19 crc kubenswrapper[4869]: E1001 15:05:19.323117 4869 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 01 15:05:19 crc kubenswrapper[4869]: E1001 15:05:19.323077 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-01 15:05:27.323062786 +0000 UTC m=+36.469905902 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 01 15:05:19 crc kubenswrapper[4869]: E1001 15:05:19.323193 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-01 15:05:27.323171559 +0000 UTC m=+36.470014705 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 01 15:05:19 crc kubenswrapper[4869]: I1001 15:05:19.580310 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 15:05:19 crc kubenswrapper[4869]: E1001 15:05:19.580755 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 15:05:19 crc kubenswrapper[4869]: I1001 15:05:19.580396 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rz7qp" Oct 01 15:05:19 crc kubenswrapper[4869]: I1001 15:05:19.580389 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 15:05:19 crc kubenswrapper[4869]: E1001 15:05:19.580859 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rz7qp" podUID="328ce213-12ef-40af-a41f-e0079949b82d" Oct 01 15:05:19 crc kubenswrapper[4869]: I1001 15:05:19.580521 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 15:05:19 crc kubenswrapper[4869]: E1001 15:05:19.580931 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 15:05:19 crc kubenswrapper[4869]: E1001 15:05:19.580986 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 15:05:19 crc kubenswrapper[4869]: I1001 15:05:19.824611 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-27gqg" event={"ID":"ebbefc55-bef9-4a03-a065-321bff3a75b4","Type":"ContainerStarted","Data":"b3afd39d18311bca00ad07f623d9d405a8cd60f6d29fc32c2bf6509b450160dd"} Oct 01 15:05:19 crc kubenswrapper[4869]: I1001 15:05:19.824912 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-27gqg" Oct 01 15:05:19 crc kubenswrapper[4869]: I1001 15:05:19.824938 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-27gqg" Oct 01 15:05:19 crc kubenswrapper[4869]: I1001 15:05:19.824948 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-27gqg" Oct 01 15:05:19 crc kubenswrapper[4869]: I1001 15:05:19.836821 4869 generic.go:334] "Generic (PLEG): container finished" podID="0dc0f13b-b0ed-4694-97f1-bdaeb0a99d53" containerID="20b624364636869761945ed8ed93eafb77ece8a3292a1ad273d59ae49a71493e" exitCode=0 Oct 01 15:05:19 crc kubenswrapper[4869]: I1001 15:05:19.836870 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-ch8sh" event={"ID":"0dc0f13b-b0ed-4694-97f1-bdaeb0a99d53","Type":"ContainerDied","Data":"20b624364636869761945ed8ed93eafb77ece8a3292a1ad273d59ae49a71493e"} Oct 01 15:05:19 crc kubenswrapper[4869]: I1001 15:05:19.896167 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-27gqg" podStartSLOduration=8.896138977 podStartE2EDuration="8.896138977s" podCreationTimestamp="2025-10-01 15:05:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:05:19.865021015 +0000 UTC m=+29.011864151" watchObservedRunningTime="2025-10-01 15:05:19.896138977 +0000 UTC m=+29.042982133" Oct 01 15:05:19 crc kubenswrapper[4869]: I1001 15:05:19.910584 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-27gqg" Oct 01 15:05:19 crc kubenswrapper[4869]: I1001 15:05:19.911294 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-27gqg" Oct 01 15:05:20 crc kubenswrapper[4869]: I1001 15:05:20.845766 4869 generic.go:334] "Generic (PLEG): container finished" podID="0dc0f13b-b0ed-4694-97f1-bdaeb0a99d53" containerID="2d11e23efcde3957a54f3db0be0cd66ee9387e4d5937ac5e9839073cac477636" exitCode=0 Oct 01 15:05:20 crc kubenswrapper[4869]: I1001 15:05:20.845844 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-ch8sh" 
event={"ID":"0dc0f13b-b0ed-4694-97f1-bdaeb0a99d53","Type":"ContainerDied","Data":"2d11e23efcde3957a54f3db0be0cd66ee9387e4d5937ac5e9839073cac477636"} Oct 01 15:05:21 crc kubenswrapper[4869]: I1001 15:05:21.580919 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rz7qp" Oct 01 15:05:21 crc kubenswrapper[4869]: I1001 15:05:21.580967 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 15:05:21 crc kubenswrapper[4869]: I1001 15:05:21.580989 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 15:05:21 crc kubenswrapper[4869]: E1001 15:05:21.581868 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rz7qp" podUID="328ce213-12ef-40af-a41f-e0079949b82d" Oct 01 15:05:21 crc kubenswrapper[4869]: I1001 15:05:21.581914 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 15:05:21 crc kubenswrapper[4869]: E1001 15:05:21.582038 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 15:05:21 crc kubenswrapper[4869]: E1001 15:05:21.582194 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 15:05:21 crc kubenswrapper[4869]: E1001 15:05:21.582286 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 15:05:21 crc kubenswrapper[4869]: I1001 15:05:21.798651 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-rz7qp"] Oct 01 15:05:21 crc kubenswrapper[4869]: I1001 15:05:21.853079 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-ch8sh" event={"ID":"0dc0f13b-b0ed-4694-97f1-bdaeb0a99d53","Type":"ContainerStarted","Data":"fd0cddc2040d64d2aa70934317652922d94ee7adf1f5535a49d9dbfbf92d4585"} Oct 01 15:05:21 crc kubenswrapper[4869]: I1001 15:05:21.853103 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-rz7qp" Oct 01 15:05:21 crc kubenswrapper[4869]: E1001 15:05:21.854026 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rz7qp" podUID="328ce213-12ef-40af-a41f-e0079949b82d" Oct 01 15:05:21 crc kubenswrapper[4869]: I1001 15:05:21.873674 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-ch8sh" podStartSLOduration=10.873646642 podStartE2EDuration="10.873646642s" podCreationTimestamp="2025-10-01 15:05:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:05:21.872892472 +0000 UTC m=+31.019735648" watchObservedRunningTime="2025-10-01 15:05:21.873646642 +0000 UTC m=+31.020489798" Oct 01 15:05:22 crc kubenswrapper[4869]: I1001 15:05:22.155902 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/328ce213-12ef-40af-a41f-e0079949b82d-metrics-certs\") pod \"network-metrics-daemon-rz7qp\" (UID: \"328ce213-12ef-40af-a41f-e0079949b82d\") " pod="openshift-multus/network-metrics-daemon-rz7qp" Oct 01 15:05:22 crc kubenswrapper[4869]: E1001 15:05:22.156127 4869 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 01 15:05:22 crc kubenswrapper[4869]: E1001 15:05:22.156313 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/328ce213-12ef-40af-a41f-e0079949b82d-metrics-certs podName:328ce213-12ef-40af-a41f-e0079949b82d nodeName:}" failed. No retries permitted until 2025-10-01 15:05:30.156235337 +0000 UTC m=+39.303078493 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/328ce213-12ef-40af-a41f-e0079949b82d-metrics-certs") pod "network-metrics-daemon-rz7qp" (UID: "328ce213-12ef-40af-a41f-e0079949b82d") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 01 15:05:23 crc kubenswrapper[4869]: I1001 15:05:23.580688 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 15:05:23 crc kubenswrapper[4869]: I1001 15:05:23.580789 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 15:05:23 crc kubenswrapper[4869]: I1001 15:05:23.580832 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rz7qp" Oct 01 15:05:23 crc kubenswrapper[4869]: I1001 15:05:23.580964 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 15:05:23 crc kubenswrapper[4869]: E1001 15:05:23.580954 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 15:05:23 crc kubenswrapper[4869]: E1001 15:05:23.581177 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rz7qp" podUID="328ce213-12ef-40af-a41f-e0079949b82d" Oct 01 15:05:23 crc kubenswrapper[4869]: E1001 15:05:23.581356 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 15:05:23 crc kubenswrapper[4869]: E1001 15:05:23.581588 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.522710 4869 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.522931 4869 kubelet_node_status.go:538] "Fast updating node status as it just became ready" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.617888 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-xjqf4"] Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.618804 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-xjqf4" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.620772 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-hxvsn"] Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.622422 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-hxvsn" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.623470 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-s8l4m"] Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.623703 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.623977 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-s8l4m" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.624776 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-kv7s4"] Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.624837 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.625550 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-kv7s4" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.626540 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-jzt8h"] Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.626616 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.627310 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.627546 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-jzt8h" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.635523 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-pdvwx"] Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.636164 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pdvwx" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.637296 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-jdrcg"] Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.637629 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-jdrcg" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.639389 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-fx7tg"] Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.639930 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-fx7tg" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.640916 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.641091 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.641332 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.642112 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.642133 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.642955 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.643137 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.643831 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.644251 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.644737 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.644793 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.644978 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.645076 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.644733 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.647535 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-5zhxq"] Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.648382 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-dxcl2"] Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.648988 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-dxcl2" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.649679 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5zhxq" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.649963 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-4zhjs"] Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.650532 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-4zhjs" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.666778 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.667321 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.667398 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.667734 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.667866 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.667891 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.668090 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.668541 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.676769 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.677230 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.677386 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.678744 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.678817 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.678984 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.679339 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.680084 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-gqrbk"] 
Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.680224 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.680505 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.680778 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-gqrbk" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.680856 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-l5vnp"] Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.681509 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-l5vnp" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.682000 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.682460 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.682580 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.682742 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.682906 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.683032 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.683310 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.683418 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.683520 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.683629 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.683792 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.683864 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-vg26t"] Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.683913 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.683953 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Oct 01 15:05:24 crc kubenswrapper[4869]: 
I1001 15:05:24.684117 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.684315 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-vg26t" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.684385 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.684591 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.684735 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.684841 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.685962 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-jv4xs"] Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.686299 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-jv4xs" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.687246 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.688119 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.689431 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-87546"] Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.689905 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-87546" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.690768 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.690965 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.691078 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.691164 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.691343 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.691769 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.692809 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.692918 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.692940 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.693057 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.693113 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8b781c72-08fb-4464-9869-87595882ef0a-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-jzt8h\" (UID: \"8b781c72-08fb-4464-9869-87595882ef0a\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-jzt8h" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.693170 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/2b6c50d9-ffec-4ecb-914e-c683a2d3b9e4-images\") pod \"machine-api-operator-5694c8668f-jdrcg\" (UID: \"2b6c50d9-ffec-4ecb-914e-c683a2d3b9e4\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-jdrcg" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.693179 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.693204 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/2424fe66-6e4a-4dc6-b00e-c5a1b01e3f4a-etcd-client\") pod \"apiserver-76f77b778f-hxvsn\" (UID: \"2424fe66-6e4a-4dc6-b00e-c5a1b01e3f4a\") " pod="openshift-apiserver/apiserver-76f77b778f-hxvsn" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.693224 4869 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/78536e39-e31c-4666-86e9-4a0c35993e8e-machine-approver-tls\") pod \"machine-approver-56656f9798-kv7s4\" (UID: \"78536e39-e31c-4666-86e9-4a0c35993e8e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-kv7s4" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.693242 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2424fe66-6e4a-4dc6-b00e-c5a1b01e3f4a-serving-cert\") pod \"apiserver-76f77b778f-hxvsn\" (UID: \"2424fe66-6e4a-4dc6-b00e-c5a1b01e3f4a\") " pod="openshift-apiserver/apiserver-76f77b778f-hxvsn" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.693273 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/2424fe66-6e4a-4dc6-b00e-c5a1b01e3f4a-encryption-config\") pod \"apiserver-76f77b778f-hxvsn\" (UID: \"2424fe66-6e4a-4dc6-b00e-c5a1b01e3f4a\") " pod="openshift-apiserver/apiserver-76f77b778f-hxvsn" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.693294 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.693304 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e3343b45-e092-46ef-9e1b-6d4d55167c19-serving-cert\") pod \"apiserver-7bbb656c7d-pdvwx\" (UID: \"e3343b45-e092-46ef-9e1b-6d4d55167c19\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pdvwx" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.693332 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6j2vg\" (UniqueName: \"kubernetes.io/projected/2b6c50d9-ffec-4ecb-914e-c683a2d3b9e4-kube-api-access-6j2vg\") pod \"machine-api-operator-5694c8668f-jdrcg\" (UID: \"2b6c50d9-ffec-4ecb-914e-c683a2d3b9e4\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-jdrcg" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.693374 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/e3343b45-e092-46ef-9e1b-6d4d55167c19-encryption-config\") pod \"apiserver-7bbb656c7d-pdvwx\" (UID: \"e3343b45-e092-46ef-9e1b-6d4d55167c19\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pdvwx" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.693405 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8367e54f-3b20-4903-8dcf-d3ae02e516ca-serving-cert\") pod \"controller-manager-879f6c89f-s8l4m\" (UID: \"8367e54f-3b20-4903-8dcf-d3ae02e516ca\") " pod="openshift-controller-manager/controller-manager-879f6c89f-s8l4m" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.693445 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3935bd54-70b2-4d02-941b-94e00b4ec101-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-xjqf4\" (UID: \"3935bd54-70b2-4d02-941b-94e00b4ec101\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-xjqf4" Oct 01 
15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.693510 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lds7j\" (UniqueName: \"kubernetes.io/projected/3935bd54-70b2-4d02-941b-94e00b4ec101-kube-api-access-lds7j\") pod \"openshift-apiserver-operator-796bbdcf4f-xjqf4\" (UID: \"3935bd54-70b2-4d02-941b-94e00b4ec101\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-xjqf4" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.693545 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e3343b45-e092-46ef-9e1b-6d4d55167c19-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-pdvwx\" (UID: \"e3343b45-e092-46ef-9e1b-6d4d55167c19\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pdvwx" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.693582 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9ftj5\" (UniqueName: \"kubernetes.io/projected/5604dc28-0cb8-47af-990f-737c37b7a8e1-kube-api-access-9ftj5\") pod \"cluster-samples-operator-665b6dd947-fx7tg\" (UID: \"5604dc28-0cb8-47af-990f-737c37b7a8e1\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-fx7tg" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.693618 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8367e54f-3b20-4903-8dcf-d3ae02e516ca-config\") pod \"controller-manager-879f6c89f-s8l4m\" (UID: \"8367e54f-3b20-4903-8dcf-d3ae02e516ca\") " pod="openshift-controller-manager/controller-manager-879f6c89f-s8l4m" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.693653 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/78536e39-e31c-4666-86e9-4a0c35993e8e-config\") pod \"machine-approver-56656f9798-kv7s4\" (UID: \"78536e39-e31c-4666-86e9-4a0c35993e8e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-kv7s4" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.693685 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6679m\" (UniqueName: \"kubernetes.io/projected/8367e54f-3b20-4903-8dcf-d3ae02e516ca-kube-api-access-6679m\") pod \"controller-manager-879f6c89f-s8l4m\" (UID: \"8367e54f-3b20-4903-8dcf-d3ae02e516ca\") " pod="openshift-controller-manager/controller-manager-879f6c89f-s8l4m" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.693738 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2424fe66-6e4a-4dc6-b00e-c5a1b01e3f4a-config\") pod \"apiserver-76f77b778f-hxvsn\" (UID: \"2424fe66-6e4a-4dc6-b00e-c5a1b01e3f4a\") " pod="openshift-apiserver/apiserver-76f77b778f-hxvsn" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.693774 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/2424fe66-6e4a-4dc6-b00e-c5a1b01e3f4a-audit-dir\") pod \"apiserver-76f77b778f-hxvsn\" (UID: \"2424fe66-6e4a-4dc6-b00e-c5a1b01e3f4a\") " pod="openshift-apiserver/apiserver-76f77b778f-hxvsn" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.693799 4869 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8b781c72-08fb-4464-9869-87595882ef0a-serving-cert\") pod \"authentication-operator-69f744f599-jzt8h\" (UID: \"8b781c72-08fb-4464-9869-87595882ef0a\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-jzt8h" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.693833 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2b6c50d9-ffec-4ecb-914e-c683a2d3b9e4-config\") pod \"machine-api-operator-5694c8668f-jdrcg\" (UID: \"2b6c50d9-ffec-4ecb-914e-c683a2d3b9e4\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-jdrcg" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.693865 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/78536e39-e31c-4666-86e9-4a0c35993e8e-auth-proxy-config\") pod \"machine-approver-56656f9798-kv7s4\" (UID: \"78536e39-e31c-4666-86e9-4a0c35993e8e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-kv7s4" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.693942 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3935bd54-70b2-4d02-941b-94e00b4ec101-config\") pod \"openshift-apiserver-operator-796bbdcf4f-xjqf4\" (UID: \"3935bd54-70b2-4d02-941b-94e00b4ec101\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-xjqf4" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.693974 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/e3343b45-e092-46ef-9e1b-6d4d55167c19-etcd-client\") pod \"apiserver-7bbb656c7d-pdvwx\" (UID: \"e3343b45-e092-46ef-9e1b-6d4d55167c19\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pdvwx" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.694012 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8367e54f-3b20-4903-8dcf-d3ae02e516ca-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-s8l4m\" (UID: \"8367e54f-3b20-4903-8dcf-d3ae02e516ca\") " pod="openshift-controller-manager/controller-manager-879f6c89f-s8l4m" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.694039 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r4fd7\" (UniqueName: \"kubernetes.io/projected/2424fe66-6e4a-4dc6-b00e-c5a1b01e3f4a-kube-api-access-r4fd7\") pod \"apiserver-76f77b778f-hxvsn\" (UID: \"2424fe66-6e4a-4dc6-b00e-c5a1b01e3f4a\") " pod="openshift-apiserver/apiserver-76f77b778f-hxvsn" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.694062 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/2424fe66-6e4a-4dc6-b00e-c5a1b01e3f4a-audit\") pod \"apiserver-76f77b778f-hxvsn\" (UID: \"2424fe66-6e4a-4dc6-b00e-c5a1b01e3f4a\") " pod="openshift-apiserver/apiserver-76f77b778f-hxvsn" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.694089 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/2424fe66-6e4a-4dc6-b00e-c5a1b01e3f4a-etcd-serving-ca\") pod \"apiserver-76f77b778f-hxvsn\" (UID: \"2424fe66-6e4a-4dc6-b00e-c5a1b01e3f4a\") " pod="openshift-apiserver/apiserver-76f77b778f-hxvsn" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.694109 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2424fe66-6e4a-4dc6-b00e-c5a1b01e3f4a-trusted-ca-bundle\") pod \"apiserver-76f77b778f-hxvsn\" (UID: \"2424fe66-6e4a-4dc6-b00e-c5a1b01e3f4a\") " pod="openshift-apiserver/apiserver-76f77b778f-hxvsn" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.694132 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/5604dc28-0cb8-47af-990f-737c37b7a8e1-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-fx7tg\" (UID: \"5604dc28-0cb8-47af-990f-737c37b7a8e1\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-fx7tg" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.694198 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8b781c72-08fb-4464-9869-87595882ef0a-config\") pod \"authentication-operator-69f744f599-jzt8h\" (UID: \"8b781c72-08fb-4464-9869-87595882ef0a\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-jzt8h" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.694232 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8b781c72-08fb-4464-9869-87595882ef0a-service-ca-bundle\") pod \"authentication-operator-69f744f599-jzt8h\" (UID: \"8b781c72-08fb-4464-9869-87595882ef0a\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-jzt8h" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.694286 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/e3343b45-e092-46ef-9e1b-6d4d55167c19-audit-dir\") pod \"apiserver-7bbb656c7d-pdvwx\" (UID: \"e3343b45-e092-46ef-9e1b-6d4d55167c19\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pdvwx" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.694332 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8c9zw\" (UniqueName: \"kubernetes.io/projected/e3343b45-e092-46ef-9e1b-6d4d55167c19-kube-api-access-8c9zw\") pod \"apiserver-7bbb656c7d-pdvwx\" (UID: \"e3343b45-e092-46ef-9e1b-6d4d55167c19\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pdvwx" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.694376 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8367e54f-3b20-4903-8dcf-d3ae02e516ca-client-ca\") pod \"controller-manager-879f6c89f-s8l4m\" (UID: \"8367e54f-3b20-4903-8dcf-d3ae02e516ca\") " pod="openshift-controller-manager/controller-manager-879f6c89f-s8l4m" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.694415 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: 
\"kubernetes.io/configmap/e3343b45-e092-46ef-9e1b-6d4d55167c19-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-pdvwx\" (UID: \"e3343b45-e092-46ef-9e1b-6d4d55167c19\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pdvwx" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.694447 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/2424fe66-6e4a-4dc6-b00e-c5a1b01e3f4a-image-import-ca\") pod \"apiserver-76f77b778f-hxvsn\" (UID: \"2424fe66-6e4a-4dc6-b00e-c5a1b01e3f4a\") " pod="openshift-apiserver/apiserver-76f77b778f-hxvsn" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.694491 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/2b6c50d9-ffec-4ecb-914e-c683a2d3b9e4-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-jdrcg\" (UID: \"2b6c50d9-ffec-4ecb-914e-c683a2d3b9e4\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-jdrcg" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.694538 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pnz9r\" (UniqueName: \"kubernetes.io/projected/8b781c72-08fb-4464-9869-87595882ef0a-kube-api-access-pnz9r\") pod \"authentication-operator-69f744f599-jzt8h\" (UID: \"8b781c72-08fb-4464-9869-87595882ef0a\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-jzt8h" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.694577 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/e3343b45-e092-46ef-9e1b-6d4d55167c19-audit-policies\") pod \"apiserver-7bbb656c7d-pdvwx\" (UID: \"e3343b45-e092-46ef-9e1b-6d4d55167c19\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pdvwx" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.694617 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/2424fe66-6e4a-4dc6-b00e-c5a1b01e3f4a-node-pullsecrets\") pod \"apiserver-76f77b778f-hxvsn\" (UID: \"2424fe66-6e4a-4dc6-b00e-c5a1b01e3f4a\") " pod="openshift-apiserver/apiserver-76f77b778f-hxvsn" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.694661 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cj9hx\" (UniqueName: \"kubernetes.io/projected/78536e39-e31c-4666-86e9-4a0c35993e8e-kube-api-access-cj9hx\") pod \"machine-approver-56656f9798-kv7s4\" (UID: \"78536e39-e31c-4666-86e9-4a0c35993e8e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-kv7s4" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.694790 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-j7g7t"] Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.695811 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-j7g7t" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.706044 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.707521 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.707535 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.709338 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.710662 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.715597 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-jds44"] Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.717322 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.717590 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.719355 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.719540 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.719927 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.720685 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.749872 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-sj2cx"] Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.764482 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-jds44" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.765299 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.770920 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.770995 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-sj2cx" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.771204 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.771948 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.772067 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.772240 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.772440 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.772623 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.772754 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.770918 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-8mczm"] Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.773214 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-vkxf8"] Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.773468 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-qp4hv"] Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.773702 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-tgtxf"] Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.774052 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-sm2sz"] Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.774371 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-sm2sz" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.774650 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-8mczm" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.774880 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-vkxf8" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.775063 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-qp4hv" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.775324 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-tgtxf" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.775443 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.775661 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.775850 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.775878 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.776116 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.776207 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.776296 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.776483 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.776655 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.780352 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.780555 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.781811 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29322180-8fxq6"] Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.782414 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29322180-8fxq6" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.784851 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.785388 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.785704 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.794458 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.794670 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.794805 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.795230 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/78536e39-e31c-4666-86e9-4a0c35993e8e-config\") pod \"machine-approver-56656f9798-kv7s4\" (UID: \"78536e39-e31c-4666-86e9-4a0c35993e8e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-kv7s4" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.795299 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6679m\" (UniqueName: \"kubernetes.io/projected/8367e54f-3b20-4903-8dcf-d3ae02e516ca-kube-api-access-6679m\") pod \"controller-manager-879f6c89f-s8l4m\" (UID: \"8367e54f-3b20-4903-8dcf-d3ae02e516ca\") " pod="openshift-controller-manager/controller-manager-879f6c89f-s8l4m" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.795336 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/2424fe66-6e4a-4dc6-b00e-c5a1b01e3f4a-audit-dir\") pod \"apiserver-76f77b778f-hxvsn\" (UID: \"2424fe66-6e4a-4dc6-b00e-c5a1b01e3f4a\") " pod="openshift-apiserver/apiserver-76f77b778f-hxvsn" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.795367 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/2ec0b99f-cb20-410b-8142-a3d046ed6578-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-vkxf8\" (UID: \"2ec0b99f-cb20-410b-8142-a3d046ed6578\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-vkxf8" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.795391 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2424fe66-6e4a-4dc6-b00e-c5a1b01e3f4a-config\") pod \"apiserver-76f77b778f-hxvsn\" (UID: \"2424fe66-6e4a-4dc6-b00e-c5a1b01e3f4a\") " pod="openshift-apiserver/apiserver-76f77b778f-hxvsn" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.795417 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/8b781c72-08fb-4464-9869-87595882ef0a-serving-cert\") pod \"authentication-operator-69f744f599-jzt8h\" (UID: \"8b781c72-08fb-4464-9869-87595882ef0a\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-jzt8h" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.795444 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ql7f8\" (UniqueName: \"kubernetes.io/projected/8ffaf4d1-4b33-48a0-ae56-4b116924f58a-kube-api-access-ql7f8\") pod \"cluster-image-registry-operator-dc59b4c8b-j7g7t\" (UID: \"8ffaf4d1-4b33-48a0-ae56-4b116924f58a\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-j7g7t" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.795470 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/0a95b64f-5e82-446c-8369-7304b2c6ec5d-profile-collector-cert\") pod \"catalog-operator-68c6474976-qp4hv\" (UID: \"0a95b64f-5e82-446c-8369-7304b2c6ec5d\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-qp4hv" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.795497 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2b6c50d9-ffec-4ecb-914e-c683a2d3b9e4-config\") pod \"machine-api-operator-5694c8668f-jdrcg\" (UID: \"2b6c50d9-ffec-4ecb-914e-c683a2d3b9e4\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-jdrcg" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.795518 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/78536e39-e31c-4666-86e9-4a0c35993e8e-auth-proxy-config\") pod \"machine-approver-56656f9798-kv7s4\" (UID: \"78536e39-e31c-4666-86e9-4a0c35993e8e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-kv7s4" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.795538 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3935bd54-70b2-4d02-941b-94e00b4ec101-config\") pod \"openshift-apiserver-operator-796bbdcf4f-xjqf4\" (UID: \"3935bd54-70b2-4d02-941b-94e00b4ec101\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-xjqf4" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.795560 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/e3343b45-e092-46ef-9e1b-6d4d55167c19-etcd-client\") pod \"apiserver-7bbb656c7d-pdvwx\" (UID: \"e3343b45-e092-46ef-9e1b-6d4d55167c19\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pdvwx" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.795591 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8367e54f-3b20-4903-8dcf-d3ae02e516ca-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-s8l4m\" (UID: \"8367e54f-3b20-4903-8dcf-d3ae02e516ca\") " pod="openshift-controller-manager/controller-manager-879f6c89f-s8l4m" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.795614 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jg8jf\" (UniqueName: \"kubernetes.io/projected/2ec0b99f-cb20-410b-8142-a3d046ed6578-kube-api-access-jg8jf\") 
pod \"control-plane-machine-set-operator-78cbb6b69f-vkxf8\" (UID: \"2ec0b99f-cb20-410b-8142-a3d046ed6578\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-vkxf8" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.795639 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r4fd7\" (UniqueName: \"kubernetes.io/projected/2424fe66-6e4a-4dc6-b00e-c5a1b01e3f4a-kube-api-access-r4fd7\") pod \"apiserver-76f77b778f-hxvsn\" (UID: \"2424fe66-6e4a-4dc6-b00e-c5a1b01e3f4a\") " pod="openshift-apiserver/apiserver-76f77b778f-hxvsn" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.795661 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rc5nb\" (UniqueName: \"kubernetes.io/projected/3635d59c-a52d-465d-8402-80153cd7369b-kube-api-access-rc5nb\") pod \"route-controller-manager-6576b87f9c-5zhxq\" (UID: \"3635d59c-a52d-465d-8402-80153cd7369b\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5zhxq" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.795697 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/8ffaf4d1-4b33-48a0-ae56-4b116924f58a-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-j7g7t\" (UID: \"8ffaf4d1-4b33-48a0-ae56-4b116924f58a\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-j7g7t" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.795720 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v428h\" (UniqueName: \"kubernetes.io/projected/0a95b64f-5e82-446c-8369-7304b2c6ec5d-kube-api-access-v428h\") pod \"catalog-operator-68c6474976-qp4hv\" (UID: \"0a95b64f-5e82-446c-8369-7304b2c6ec5d\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-qp4hv" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.795739 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/3157872f-8582-4335-af4d-18e4ab38d91c-trusted-ca\") pod \"console-operator-58897d9998-vg26t\" (UID: \"3157872f-8582-4335-af4d-18e4ab38d91c\") " pod="openshift-console-operator/console-operator-58897d9998-vg26t" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.795760 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/2424fe66-6e4a-4dc6-b00e-c5a1b01e3f4a-audit\") pod \"apiserver-76f77b778f-hxvsn\" (UID: \"2424fe66-6e4a-4dc6-b00e-c5a1b01e3f4a\") " pod="openshift-apiserver/apiserver-76f77b778f-hxvsn" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.795780 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3635d59c-a52d-465d-8402-80153cd7369b-client-ca\") pod \"route-controller-manager-6576b87f9c-5zhxq\" (UID: \"3635d59c-a52d-465d-8402-80153cd7369b\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5zhxq" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.795801 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/5604dc28-0cb8-47af-990f-737c37b7a8e1-samples-operator-tls\") pod 
\"cluster-samples-operator-665b6dd947-fx7tg\" (UID: \"5604dc28-0cb8-47af-990f-737c37b7a8e1\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-fx7tg" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.795822 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3157872f-8582-4335-af4d-18e4ab38d91c-config\") pod \"console-operator-58897d9998-vg26t\" (UID: \"3157872f-8582-4335-af4d-18e4ab38d91c\") " pod="openshift-console-operator/console-operator-58897d9998-vg26t" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.795853 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8b781c72-08fb-4464-9869-87595882ef0a-config\") pod \"authentication-operator-69f744f599-jzt8h\" (UID: \"8b781c72-08fb-4464-9869-87595882ef0a\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-jzt8h" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.795872 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/2424fe66-6e4a-4dc6-b00e-c5a1b01e3f4a-etcd-serving-ca\") pod \"apiserver-76f77b778f-hxvsn\" (UID: \"2424fe66-6e4a-4dc6-b00e-c5a1b01e3f4a\") " pod="openshift-apiserver/apiserver-76f77b778f-hxvsn" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.795895 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2424fe66-6e4a-4dc6-b00e-c5a1b01e3f4a-trusted-ca-bundle\") pod \"apiserver-76f77b778f-hxvsn\" (UID: \"2424fe66-6e4a-4dc6-b00e-c5a1b01e3f4a\") " pod="openshift-apiserver/apiserver-76f77b778f-hxvsn" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.795915 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8c9zw\" (UniqueName: \"kubernetes.io/projected/e3343b45-e092-46ef-9e1b-6d4d55167c19-kube-api-access-8c9zw\") pod \"apiserver-7bbb656c7d-pdvwx\" (UID: \"e3343b45-e092-46ef-9e1b-6d4d55167c19\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pdvwx" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.795939 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8b781c72-08fb-4464-9869-87595882ef0a-service-ca-bundle\") pod \"authentication-operator-69f744f599-jzt8h\" (UID: \"8b781c72-08fb-4464-9869-87595882ef0a\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-jzt8h" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.795959 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/e3343b45-e092-46ef-9e1b-6d4d55167c19-audit-dir\") pod \"apiserver-7bbb656c7d-pdvwx\" (UID: \"e3343b45-e092-46ef-9e1b-6d4d55167c19\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pdvwx" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.795977 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3157872f-8582-4335-af4d-18e4ab38d91c-serving-cert\") pod \"console-operator-58897d9998-vg26t\" (UID: \"3157872f-8582-4335-af4d-18e4ab38d91c\") " pod="openshift-console-operator/console-operator-58897d9998-vg26t" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 
15:05:24.795999 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8367e54f-3b20-4903-8dcf-d3ae02e516ca-client-ca\") pod \"controller-manager-879f6c89f-s8l4m\" (UID: \"8367e54f-3b20-4903-8dcf-d3ae02e516ca\") " pod="openshift-controller-manager/controller-manager-879f6c89f-s8l4m" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.796021 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e544db96-88d4-4638-b95a-de0e7c17e2d9-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-sj2cx\" (UID: \"e544db96-88d4-4638-b95a-de0e7c17e2d9\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-sj2cx" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.796043 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/2b6c50d9-ffec-4ecb-914e-c683a2d3b9e4-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-jdrcg\" (UID: \"2b6c50d9-ffec-4ecb-914e-c683a2d3b9e4\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-jdrcg" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.796046 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/78536e39-e31c-4666-86e9-4a0c35993e8e-config\") pod \"machine-approver-56656f9798-kv7s4\" (UID: \"78536e39-e31c-4666-86e9-4a0c35993e8e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-kv7s4" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.796065 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/e3343b45-e092-46ef-9e1b-6d4d55167c19-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-pdvwx\" (UID: \"e3343b45-e092-46ef-9e1b-6d4d55167c19\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pdvwx" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.796084 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/2424fe66-6e4a-4dc6-b00e-c5a1b01e3f4a-image-import-ca\") pod \"apiserver-76f77b778f-hxvsn\" (UID: \"2424fe66-6e4a-4dc6-b00e-c5a1b01e3f4a\") " pod="openshift-apiserver/apiserver-76f77b778f-hxvsn" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.796103 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pnz9r\" (UniqueName: \"kubernetes.io/projected/8b781c72-08fb-4464-9869-87595882ef0a-kube-api-access-pnz9r\") pod \"authentication-operator-69f744f599-jzt8h\" (UID: \"8b781c72-08fb-4464-9869-87595882ef0a\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-jzt8h" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.796121 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/e3343b45-e092-46ef-9e1b-6d4d55167c19-audit-policies\") pod \"apiserver-7bbb656c7d-pdvwx\" (UID: \"e3343b45-e092-46ef-9e1b-6d4d55167c19\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pdvwx" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.796143 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cj9hx\" (UniqueName: 
\"kubernetes.io/projected/78536e39-e31c-4666-86e9-4a0c35993e8e-kube-api-access-cj9hx\") pod \"machine-approver-56656f9798-kv7s4\" (UID: \"78536e39-e31c-4666-86e9-4a0c35993e8e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-kv7s4" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.796161 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/2424fe66-6e4a-4dc6-b00e-c5a1b01e3f4a-node-pullsecrets\") pod \"apiserver-76f77b778f-hxvsn\" (UID: \"2424fe66-6e4a-4dc6-b00e-c5a1b01e3f4a\") " pod="openshift-apiserver/apiserver-76f77b778f-hxvsn" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.796183 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/2b6c50d9-ffec-4ecb-914e-c683a2d3b9e4-images\") pod \"machine-api-operator-5694c8668f-jdrcg\" (UID: \"2b6c50d9-ffec-4ecb-914e-c683a2d3b9e4\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-jdrcg" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.796202 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8b781c72-08fb-4464-9869-87595882ef0a-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-jzt8h\" (UID: \"8b781c72-08fb-4464-9869-87595882ef0a\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-jzt8h" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.796223 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e544db96-88d4-4638-b95a-de0e7c17e2d9-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-sj2cx\" (UID: \"e544db96-88d4-4638-b95a-de0e7c17e2d9\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-sj2cx" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.796252 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/2424fe66-6e4a-4dc6-b00e-c5a1b01e3f4a-etcd-client\") pod \"apiserver-76f77b778f-hxvsn\" (UID: \"2424fe66-6e4a-4dc6-b00e-c5a1b01e3f4a\") " pod="openshift-apiserver/apiserver-76f77b778f-hxvsn" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.796295 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/58c072ca-9e0a-418d-90ba-ea33213d42c7-srv-cert\") pod \"olm-operator-6b444d44fb-sm2sz\" (UID: \"58c072ca-9e0a-418d-90ba-ea33213d42c7\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-sm2sz" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.796318 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/58c072ca-9e0a-418d-90ba-ea33213d42c7-profile-collector-cert\") pod \"olm-operator-6b444d44fb-sm2sz\" (UID: \"58c072ca-9e0a-418d-90ba-ea33213d42c7\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-sm2sz" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.796339 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e544db96-88d4-4638-b95a-de0e7c17e2d9-config\") pod \"kube-controller-manager-operator-78b949d7b-sj2cx\" (UID: 
\"e544db96-88d4-4638-b95a-de0e7c17e2d9\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-sj2cx" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.796359 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8ffaf4d1-4b33-48a0-ae56-4b116924f58a-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-j7g7t\" (UID: \"8ffaf4d1-4b33-48a0-ae56-4b116924f58a\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-j7g7t" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.796380 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3635d59c-a52d-465d-8402-80153cd7369b-serving-cert\") pod \"route-controller-manager-6576b87f9c-5zhxq\" (UID: \"3635d59c-a52d-465d-8402-80153cd7369b\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5zhxq" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.796404 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/78536e39-e31c-4666-86e9-4a0c35993e8e-machine-approver-tls\") pod \"machine-approver-56656f9798-kv7s4\" (UID: \"78536e39-e31c-4666-86e9-4a0c35993e8e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-kv7s4" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.796430 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2424fe66-6e4a-4dc6-b00e-c5a1b01e3f4a-serving-cert\") pod \"apiserver-76f77b778f-hxvsn\" (UID: \"2424fe66-6e4a-4dc6-b00e-c5a1b01e3f4a\") " pod="openshift-apiserver/apiserver-76f77b778f-hxvsn" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.796461 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e3343b45-e092-46ef-9e1b-6d4d55167c19-serving-cert\") pod \"apiserver-7bbb656c7d-pdvwx\" (UID: \"e3343b45-e092-46ef-9e1b-6d4d55167c19\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pdvwx" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.796482 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/2424fe66-6e4a-4dc6-b00e-c5a1b01e3f4a-encryption-config\") pod \"apiserver-76f77b778f-hxvsn\" (UID: \"2424fe66-6e4a-4dc6-b00e-c5a1b01e3f4a\") " pod="openshift-apiserver/apiserver-76f77b778f-hxvsn" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.796507 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8ffaf4d1-4b33-48a0-ae56-4b116924f58a-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-j7g7t\" (UID: \"8ffaf4d1-4b33-48a0-ae56-4b116924f58a\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-j7g7t" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.796527 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/0a95b64f-5e82-446c-8369-7304b2c6ec5d-srv-cert\") pod \"catalog-operator-68c6474976-qp4hv\" (UID: \"0a95b64f-5e82-446c-8369-7304b2c6ec5d\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-qp4hv" Oct 
01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.796551 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6j2vg\" (UniqueName: \"kubernetes.io/projected/2b6c50d9-ffec-4ecb-914e-c683a2d3b9e4-kube-api-access-6j2vg\") pod \"machine-api-operator-5694c8668f-jdrcg\" (UID: \"2b6c50d9-ffec-4ecb-914e-c683a2d3b9e4\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-jdrcg" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.796572 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3635d59c-a52d-465d-8402-80153cd7369b-config\") pod \"route-controller-manager-6576b87f9c-5zhxq\" (UID: \"3635d59c-a52d-465d-8402-80153cd7369b\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5zhxq" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.796595 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/e3343b45-e092-46ef-9e1b-6d4d55167c19-encryption-config\") pod \"apiserver-7bbb656c7d-pdvwx\" (UID: \"e3343b45-e092-46ef-9e1b-6d4d55167c19\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pdvwx" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.796616 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8367e54f-3b20-4903-8dcf-d3ae02e516ca-serving-cert\") pod \"controller-manager-879f6c89f-s8l4m\" (UID: \"8367e54f-3b20-4903-8dcf-d3ae02e516ca\") " pod="openshift-controller-manager/controller-manager-879f6c89f-s8l4m" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.796643 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mqbcm\" (UniqueName: \"kubernetes.io/projected/2d8f8fae-d727-4763-bb02-0a74320ba8c4-kube-api-access-mqbcm\") pod \"downloads-7954f5f757-dxcl2\" (UID: \"2d8f8fae-d727-4763-bb02-0a74320ba8c4\") " pod="openshift-console/downloads-7954f5f757-dxcl2" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.796669 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lds7j\" (UniqueName: \"kubernetes.io/projected/3935bd54-70b2-4d02-941b-94e00b4ec101-kube-api-access-lds7j\") pod \"openshift-apiserver-operator-796bbdcf4f-xjqf4\" (UID: \"3935bd54-70b2-4d02-941b-94e00b4ec101\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-xjqf4" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.796691 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e3343b45-e092-46ef-9e1b-6d4d55167c19-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-pdvwx\" (UID: \"e3343b45-e092-46ef-9e1b-6d4d55167c19\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pdvwx" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.796728 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3935bd54-70b2-4d02-941b-94e00b4ec101-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-xjqf4\" (UID: \"3935bd54-70b2-4d02-941b-94e00b4ec101\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-xjqf4" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.796751 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"kube-api-access-9ftj5\" (UniqueName: \"kubernetes.io/projected/5604dc28-0cb8-47af-990f-737c37b7a8e1-kube-api-access-9ftj5\") pod \"cluster-samples-operator-665b6dd947-fx7tg\" (UID: \"5604dc28-0cb8-47af-990f-737c37b7a8e1\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-fx7tg" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.796776 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lfktn\" (UniqueName: \"kubernetes.io/projected/3157872f-8582-4335-af4d-18e4ab38d91c-kube-api-access-lfktn\") pod \"console-operator-58897d9998-vg26t\" (UID: \"3157872f-8582-4335-af4d-18e4ab38d91c\") " pod="openshift-console-operator/console-operator-58897d9998-vg26t" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.796786 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/2424fe66-6e4a-4dc6-b00e-c5a1b01e3f4a-audit\") pod \"apiserver-76f77b778f-hxvsn\" (UID: \"2424fe66-6e4a-4dc6-b00e-c5a1b01e3f4a\") " pod="openshift-apiserver/apiserver-76f77b778f-hxvsn" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.796800 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8367e54f-3b20-4903-8dcf-d3ae02e516ca-config\") pod \"controller-manager-879f6c89f-s8l4m\" (UID: \"8367e54f-3b20-4903-8dcf-d3ae02e516ca\") " pod="openshift-controller-manager/controller-manager-879f6c89f-s8l4m" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.796851 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xs9sm\" (UniqueName: \"kubernetes.io/projected/58c072ca-9e0a-418d-90ba-ea33213d42c7-kube-api-access-xs9sm\") pod \"olm-operator-6b444d44fb-sm2sz\" (UID: \"58c072ca-9e0a-418d-90ba-ea33213d42c7\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-sm2sz" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.799070 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/2424fe66-6e4a-4dc6-b00e-c5a1b01e3f4a-node-pullsecrets\") pod \"apiserver-76f77b778f-hxvsn\" (UID: \"2424fe66-6e4a-4dc6-b00e-c5a1b01e3f4a\") " pod="openshift-apiserver/apiserver-76f77b778f-hxvsn" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.804587 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/2424fe66-6e4a-4dc6-b00e-c5a1b01e3f4a-etcd-serving-ca\") pod \"apiserver-76f77b778f-hxvsn\" (UID: \"2424fe66-6e4a-4dc6-b00e-c5a1b01e3f4a\") " pod="openshift-apiserver/apiserver-76f77b778f-hxvsn" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.808960 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-hjpxg"] Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.809649 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-t5kll"] Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.810002 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-t5kll" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.810249 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-hjpxg" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.811028 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/78536e39-e31c-4666-86e9-4a0c35993e8e-auth-proxy-config\") pod \"machine-approver-56656f9798-kv7s4\" (UID: \"78536e39-e31c-4666-86e9-4a0c35993e8e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-kv7s4" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.795496 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/2424fe66-6e4a-4dc6-b00e-c5a1b01e3f4a-audit-dir\") pod \"apiserver-76f77b778f-hxvsn\" (UID: \"2424fe66-6e4a-4dc6-b00e-c5a1b01e3f4a\") " pod="openshift-apiserver/apiserver-76f77b778f-hxvsn" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.812615 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3935bd54-70b2-4d02-941b-94e00b4ec101-config\") pod \"openshift-apiserver-operator-796bbdcf4f-xjqf4\" (UID: \"3935bd54-70b2-4d02-941b-94e00b4ec101\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-xjqf4" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.812788 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/e3343b45-e092-46ef-9e1b-6d4d55167c19-audit-dir\") pod \"apiserver-7bbb656c7d-pdvwx\" (UID: \"e3343b45-e092-46ef-9e1b-6d4d55167c19\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pdvwx" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.812953 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2424fe66-6e4a-4dc6-b00e-c5a1b01e3f4a-config\") pod \"apiserver-76f77b778f-hxvsn\" (UID: \"2424fe66-6e4a-4dc6-b00e-c5a1b01e3f4a\") " pod="openshift-apiserver/apiserver-76f77b778f-hxvsn" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.815283 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-zb5c7"] Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.816005 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-xpzff"] Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.816432 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-zb5c7" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.816044 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.816754 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-xpzff" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.817678 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/5604dc28-0cb8-47af-990f-737c37b7a8e1-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-fx7tg\" (UID: \"5604dc28-0cb8-47af-990f-737c37b7a8e1\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-fx7tg" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.818073 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/2b6c50d9-ffec-4ecb-914e-c683a2d3b9e4-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-jdrcg\" (UID: \"2b6c50d9-ffec-4ecb-914e-c683a2d3b9e4\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-jdrcg" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.818419 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/78536e39-e31c-4666-86e9-4a0c35993e8e-machine-approver-tls\") pod \"machine-approver-56656f9798-kv7s4\" (UID: \"78536e39-e31c-4666-86e9-4a0c35993e8e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-kv7s4" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.820060 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3935bd54-70b2-4d02-941b-94e00b4ec101-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-xjqf4\" (UID: \"3935bd54-70b2-4d02-941b-94e00b4ec101\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-xjqf4" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.821628 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8367e54f-3b20-4903-8dcf-d3ae02e516ca-serving-cert\") pod \"controller-manager-879f6c89f-s8l4m\" (UID: \"8367e54f-3b20-4903-8dcf-d3ae02e516ca\") " pod="openshift-controller-manager/controller-manager-879f6c89f-s8l4m" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.821757 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-vjs4q"] Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.821959 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8b781c72-08fb-4464-9869-87595882ef0a-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-jzt8h\" (UID: \"8b781c72-08fb-4464-9869-87595882ef0a\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-jzt8h" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.831431 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8367e54f-3b20-4903-8dcf-d3ae02e516ca-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-s8l4m\" (UID: \"8367e54f-3b20-4903-8dcf-d3ae02e516ca\") " pod="openshift-controller-manager/controller-manager-879f6c89f-s8l4m" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.832739 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8b781c72-08fb-4464-9869-87595882ef0a-service-ca-bundle\") pod 
\"authentication-operator-69f744f599-jzt8h\" (UID: \"8b781c72-08fb-4464-9869-87595882ef0a\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-jzt8h" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.833017 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8367e54f-3b20-4903-8dcf-d3ae02e516ca-client-ca\") pod \"controller-manager-879f6c89f-s8l4m\" (UID: \"8367e54f-3b20-4903-8dcf-d3ae02e516ca\") " pod="openshift-controller-manager/controller-manager-879f6c89f-s8l4m" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.833128 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8b781c72-08fb-4464-9869-87595882ef0a-serving-cert\") pod \"authentication-operator-69f744f599-jzt8h\" (UID: \"8b781c72-08fb-4464-9869-87595882ef0a\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-jzt8h" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.833468 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8b781c72-08fb-4464-9869-87595882ef0a-config\") pod \"authentication-operator-69f744f599-jzt8h\" (UID: \"8b781c72-08fb-4464-9869-87595882ef0a\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-jzt8h" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.833717 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.836512 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8367e54f-3b20-4903-8dcf-d3ae02e516ca-config\") pod \"controller-manager-879f6c89f-s8l4m\" (UID: \"8367e54f-3b20-4903-8dcf-d3ae02e516ca\") " pod="openshift-controller-manager/controller-manager-879f6c89f-s8l4m" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.837020 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/e3343b45-e092-46ef-9e1b-6d4d55167c19-etcd-client\") pod \"apiserver-7bbb656c7d-pdvwx\" (UID: \"e3343b45-e092-46ef-9e1b-6d4d55167c19\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pdvwx" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.837938 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-klj2b"] Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.838526 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-bb9hc"] Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.843195 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2424fe66-6e4a-4dc6-b00e-c5a1b01e3f4a-trusted-ca-bundle\") pod \"apiserver-76f77b778f-hxvsn\" (UID: \"2424fe66-6e4a-4dc6-b00e-c5a1b01e3f4a\") " pod="openshift-apiserver/apiserver-76f77b778f-hxvsn" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.843836 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-vjs4q" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.844347 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2b6c50d9-ffec-4ecb-914e-c683a2d3b9e4-config\") pod \"machine-api-operator-5694c8668f-jdrcg\" (UID: \"2b6c50d9-ffec-4ecb-914e-c683a2d3b9e4\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-jdrcg" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.845511 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/2424fe66-6e4a-4dc6-b00e-c5a1b01e3f4a-image-import-ca\") pod \"apiserver-76f77b778f-hxvsn\" (UID: \"2424fe66-6e4a-4dc6-b00e-c5a1b01e3f4a\") " pod="openshift-apiserver/apiserver-76f77b778f-hxvsn" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.847437 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-bb9hc" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.847830 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-klj2b" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.843197 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/2b6c50d9-ffec-4ecb-914e-c683a2d3b9e4-images\") pod \"machine-api-operator-5694c8668f-jdrcg\" (UID: \"2b6c50d9-ffec-4ecb-914e-c683a2d3b9e4\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-jdrcg" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.851563 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.852170 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/e3343b45-e092-46ef-9e1b-6d4d55167c19-audit-policies\") pod \"apiserver-7bbb656c7d-pdvwx\" (UID: \"e3343b45-e092-46ef-9e1b-6d4d55167c19\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pdvwx" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.853805 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/2424fe66-6e4a-4dc6-b00e-c5a1b01e3f4a-etcd-client\") pod \"apiserver-76f77b778f-hxvsn\" (UID: \"2424fe66-6e4a-4dc6-b00e-c5a1b01e3f4a\") " pod="openshift-apiserver/apiserver-76f77b778f-hxvsn" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.854151 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e3343b45-e092-46ef-9e1b-6d4d55167c19-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-pdvwx\" (UID: \"e3343b45-e092-46ef-9e1b-6d4d55167c19\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pdvwx" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.854568 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2424fe66-6e4a-4dc6-b00e-c5a1b01e3f4a-serving-cert\") pod \"apiserver-76f77b778f-hxvsn\" (UID: \"2424fe66-6e4a-4dc6-b00e-c5a1b01e3f4a\") " pod="openshift-apiserver/apiserver-76f77b778f-hxvsn" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.855957 
4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-gdgpp"] Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.857167 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/2424fe66-6e4a-4dc6-b00e-c5a1b01e3f4a-encryption-config\") pod \"apiserver-76f77b778f-hxvsn\" (UID: \"2424fe66-6e4a-4dc6-b00e-c5a1b01e3f4a\") " pod="openshift-apiserver/apiserver-76f77b778f-hxvsn" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.858314 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/e3343b45-e092-46ef-9e1b-6d4d55167c19-encryption-config\") pod \"apiserver-7bbb656c7d-pdvwx\" (UID: \"e3343b45-e092-46ef-9e1b-6d4d55167c19\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pdvwx" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.860751 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-mdc2f"] Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.861028 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.863750 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-mdc2f" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.864239 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/e3343b45-e092-46ef-9e1b-6d4d55167c19-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-pdvwx\" (UID: \"e3343b45-e092-46ef-9e1b-6d4d55167c19\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pdvwx" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.864706 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-hnx9p"] Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.866944 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gdgpp" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.867810 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e3343b45-e092-46ef-9e1b-6d4d55167c19-serving-cert\") pod \"apiserver-7bbb656c7d-pdvwx\" (UID: \"e3343b45-e092-46ef-9e1b-6d4d55167c19\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pdvwx" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.869153 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wht8b"] Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.869489 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-hnx9p" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.869888 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-qd5ft"] Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.870276 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-jlnv5"] Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.870508 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wht8b" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.870688 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-xjqf4"] Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.870788 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-jlnv5" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.870830 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-qd5ft" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.871254 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-w5ngh"] Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.871727 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-w5ngh" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.871954 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-5zhxq"] Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.873055 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-s8l4m"] Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.874016 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-jdrcg"] Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.875072 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-dxcl2"] Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.876193 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-hxvsn"] Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.877186 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-fx7tg"] Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.878356 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-zb5c7"] Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.879816 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-sm2sz"] Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.880322 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-jzt8h"] Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.881177 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-87546"] Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.881804 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.882363 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-9pwb5"] Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.884047 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-jv4xs"] Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 
15:05:24.884094 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-9pwb5" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.885574 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-pdvwx"] Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.886697 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-qp4hv"] Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.887965 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-gqrbk"] Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.888921 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-nk6rv"] Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.890042 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-nk6rv" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.892293 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-vg26t"] Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.893527 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-t5kll"] Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.895214 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29322180-8fxq6"] Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.896456 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wht8b"] Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.897613 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jg8jf\" (UniqueName: \"kubernetes.io/projected/2ec0b99f-cb20-410b-8142-a3d046ed6578-kube-api-access-jg8jf\") pod \"control-plane-machine-set-operator-78cbb6b69f-vkxf8\" (UID: \"2ec0b99f-cb20-410b-8142-a3d046ed6578\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-vkxf8" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.897649 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rc5nb\" (UniqueName: \"kubernetes.io/projected/3635d59c-a52d-465d-8402-80153cd7369b-kube-api-access-rc5nb\") pod \"route-controller-manager-6576b87f9c-5zhxq\" (UID: \"3635d59c-a52d-465d-8402-80153cd7369b\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5zhxq" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.897681 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/8ffaf4d1-4b33-48a0-ae56-4b116924f58a-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-j7g7t\" (UID: \"8ffaf4d1-4b33-48a0-ae56-4b116924f58a\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-j7g7t" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.897704 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v428h\" (UniqueName: \"kubernetes.io/projected/0a95b64f-5e82-446c-8369-7304b2c6ec5d-kube-api-access-v428h\") pod \"catalog-operator-68c6474976-qp4hv\" (UID: \"0a95b64f-5e82-446c-8369-7304b2c6ec5d\") " 
pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-qp4hv" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.897726 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/3157872f-8582-4335-af4d-18e4ab38d91c-trusted-ca\") pod \"console-operator-58897d9998-vg26t\" (UID: \"3157872f-8582-4335-af4d-18e4ab38d91c\") " pod="openshift-console-operator/console-operator-58897d9998-vg26t" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.897748 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3635d59c-a52d-465d-8402-80153cd7369b-client-ca\") pod \"route-controller-manager-6576b87f9c-5zhxq\" (UID: \"3635d59c-a52d-465d-8402-80153cd7369b\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5zhxq" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.897837 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3157872f-8582-4335-af4d-18e4ab38d91c-config\") pod \"console-operator-58897d9998-vg26t\" (UID: \"3157872f-8582-4335-af4d-18e4ab38d91c\") " pod="openshift-console-operator/console-operator-58897d9998-vg26t" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.897876 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3157872f-8582-4335-af4d-18e4ab38d91c-serving-cert\") pod \"console-operator-58897d9998-vg26t\" (UID: \"3157872f-8582-4335-af4d-18e4ab38d91c\") " pod="openshift-console-operator/console-operator-58897d9998-vg26t" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.897905 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e544db96-88d4-4638-b95a-de0e7c17e2d9-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-sj2cx\" (UID: \"e544db96-88d4-4638-b95a-de0e7c17e2d9\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-sj2cx" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.897939 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e544db96-88d4-4638-b95a-de0e7c17e2d9-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-sj2cx\" (UID: \"e544db96-88d4-4638-b95a-de0e7c17e2d9\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-sj2cx" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.897981 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/58c072ca-9e0a-418d-90ba-ea33213d42c7-srv-cert\") pod \"olm-operator-6b444d44fb-sm2sz\" (UID: \"58c072ca-9e0a-418d-90ba-ea33213d42c7\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-sm2sz" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.898002 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/58c072ca-9e0a-418d-90ba-ea33213d42c7-profile-collector-cert\") pod \"olm-operator-6b444d44fb-sm2sz\" (UID: \"58c072ca-9e0a-418d-90ba-ea33213d42c7\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-sm2sz" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.898017 4869 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e544db96-88d4-4638-b95a-de0e7c17e2d9-config\") pod \"kube-controller-manager-operator-78b949d7b-sj2cx\" (UID: \"e544db96-88d4-4638-b95a-de0e7c17e2d9\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-sj2cx" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.898035 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8ffaf4d1-4b33-48a0-ae56-4b116924f58a-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-j7g7t\" (UID: \"8ffaf4d1-4b33-48a0-ae56-4b116924f58a\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-j7g7t" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.898051 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3635d59c-a52d-465d-8402-80153cd7369b-serving-cert\") pod \"route-controller-manager-6576b87f9c-5zhxq\" (UID: \"3635d59c-a52d-465d-8402-80153cd7369b\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5zhxq" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.898094 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8ffaf4d1-4b33-48a0-ae56-4b116924f58a-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-j7g7t\" (UID: \"8ffaf4d1-4b33-48a0-ae56-4b116924f58a\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-j7g7t" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.898112 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/0a95b64f-5e82-446c-8369-7304b2c6ec5d-srv-cert\") pod \"catalog-operator-68c6474976-qp4hv\" (UID: \"0a95b64f-5e82-446c-8369-7304b2c6ec5d\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-qp4hv" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.900070 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3635d59c-a52d-465d-8402-80153cd7369b-config\") pod \"route-controller-manager-6576b87f9c-5zhxq\" (UID: \"3635d59c-a52d-465d-8402-80153cd7369b\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5zhxq" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.900104 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mqbcm\" (UniqueName: \"kubernetes.io/projected/2d8f8fae-d727-4763-bb02-0a74320ba8c4-kube-api-access-mqbcm\") pod \"downloads-7954f5f757-dxcl2\" (UID: \"2d8f8fae-d727-4763-bb02-0a74320ba8c4\") " pod="openshift-console/downloads-7954f5f757-dxcl2" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.900143 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lfktn\" (UniqueName: \"kubernetes.io/projected/3157872f-8582-4335-af4d-18e4ab38d91c-kube-api-access-lfktn\") pod \"console-operator-58897d9998-vg26t\" (UID: \"3157872f-8582-4335-af4d-18e4ab38d91c\") " pod="openshift-console-operator/console-operator-58897d9998-vg26t" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.900159 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/3157872f-8582-4335-af4d-18e4ab38d91c-config\") pod \"console-operator-58897d9998-vg26t\" (UID: \"3157872f-8582-4335-af4d-18e4ab38d91c\") " pod="openshift-console-operator/console-operator-58897d9998-vg26t" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.900165 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xs9sm\" (UniqueName: \"kubernetes.io/projected/58c072ca-9e0a-418d-90ba-ea33213d42c7-kube-api-access-xs9sm\") pod \"olm-operator-6b444d44fb-sm2sz\" (UID: \"58c072ca-9e0a-418d-90ba-ea33213d42c7\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-sm2sz" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.900026 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8ffaf4d1-4b33-48a0-ae56-4b116924f58a-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-j7g7t\" (UID: \"8ffaf4d1-4b33-48a0-ae56-4b116924f58a\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-j7g7t" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.900033 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/3157872f-8582-4335-af4d-18e4ab38d91c-trusted-ca\") pod \"console-operator-58897d9998-vg26t\" (UID: \"3157872f-8582-4335-af4d-18e4ab38d91c\") " pod="openshift-console-operator/console-operator-58897d9998-vg26t" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.898876 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-4zhjs"] Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.900392 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/2ec0b99f-cb20-410b-8142-a3d046ed6578-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-vkxf8\" (UID: \"2ec0b99f-cb20-410b-8142-a3d046ed6578\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-vkxf8" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.900489 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ql7f8\" (UniqueName: \"kubernetes.io/projected/8ffaf4d1-4b33-48a0-ae56-4b116924f58a-kube-api-access-ql7f8\") pod \"cluster-image-registry-operator-dc59b4c8b-j7g7t\" (UID: \"8ffaf4d1-4b33-48a0-ae56-4b116924f58a\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-j7g7t" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.900555 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/0a95b64f-5e82-446c-8369-7304b2c6ec5d-profile-collector-cert\") pod \"catalog-operator-68c6474976-qp4hv\" (UID: \"0a95b64f-5e82-446c-8369-7304b2c6ec5d\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-qp4hv" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.899717 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3635d59c-a52d-465d-8402-80153cd7369b-client-ca\") pod \"route-controller-manager-6576b87f9c-5zhxq\" (UID: \"3635d59c-a52d-465d-8402-80153cd7369b\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5zhxq" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.900905 4869 
kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-qd5ft"] Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.901202 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3635d59c-a52d-465d-8402-80153cd7369b-config\") pod \"route-controller-manager-6576b87f9c-5zhxq\" (UID: \"3635d59c-a52d-465d-8402-80153cd7369b\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5zhxq" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.901806 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3157872f-8582-4335-af4d-18e4ab38d91c-serving-cert\") pod \"console-operator-58897d9998-vg26t\" (UID: \"3157872f-8582-4335-af4d-18e4ab38d91c\") " pod="openshift-console-operator/console-operator-58897d9998-vg26t" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.902076 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/8ffaf4d1-4b33-48a0-ae56-4b116924f58a-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-j7g7t\" (UID: \"8ffaf4d1-4b33-48a0-ae56-4b116924f58a\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-j7g7t" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.902879 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-vkxf8"] Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.903969 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-jds44"] Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.904780 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-j7g7t"] Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.906358 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-l5vnp"] Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.906516 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/2ec0b99f-cb20-410b-8142-a3d046ed6578-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-vkxf8\" (UID: \"2ec0b99f-cb20-410b-8142-a3d046ed6578\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-vkxf8" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.908299 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.912096 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-hjpxg"] Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.912246 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/cni-sysctl-allowlist-ds-xffhm"] Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.913438 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/cni-sysctl-allowlist-ds-xffhm" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.914856 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/0a95b64f-5e82-446c-8369-7304b2c6ec5d-srv-cert\") pod \"catalog-operator-68c6474976-qp4hv\" (UID: \"0a95b64f-5e82-446c-8369-7304b2c6ec5d\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-qp4hv" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.916274 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-8mczm"] Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.918628 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3635d59c-a52d-465d-8402-80153cd7369b-serving-cert\") pod \"route-controller-manager-6576b87f9c-5zhxq\" (UID: \"3635d59c-a52d-465d-8402-80153cd7369b\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5zhxq" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.919653 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-hnx9p"] Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.921192 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.921370 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.924712 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-v6xf7"] Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.925555 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-canary/ingress-canary-v6xf7" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.926367 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-tgtxf"] Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.927388 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-klj2b"] Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.928368 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-jlnv5"] Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.929380 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-nk6rv"] Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.930481 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-xpzff"] Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.931608 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-vjs4q"] Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.932555 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-sj2cx"] Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.933622 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-9pwb5"] Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.936315 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-v6xf7"] Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.937070 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-gdgpp"] Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.938048 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-bb9hc"] Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.941124 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.961158 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Oct 01 15:05:24 crc kubenswrapper[4869]: I1001 15:05:24.980624 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Oct 01 15:05:25 crc kubenswrapper[4869]: I1001 15:05:25.002073 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Oct 01 15:05:25 crc kubenswrapper[4869]: I1001 15:05:25.013117 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/58c072ca-9e0a-418d-90ba-ea33213d42c7-srv-cert\") pod \"olm-operator-6b444d44fb-sm2sz\" (UID: \"58c072ca-9e0a-418d-90ba-ea33213d42c7\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-sm2sz" Oct 01 15:05:25 crc kubenswrapper[4869]: I1001 15:05:25.020721 4869 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Oct 01 15:05:25 crc kubenswrapper[4869]: I1001 15:05:25.032304 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e544db96-88d4-4638-b95a-de0e7c17e2d9-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-sj2cx\" (UID: \"e544db96-88d4-4638-b95a-de0e7c17e2d9\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-sj2cx" Oct 01 15:05:25 crc kubenswrapper[4869]: I1001 15:05:25.040964 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Oct 01 15:05:25 crc kubenswrapper[4869]: I1001 15:05:25.056761 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/0a95b64f-5e82-446c-8369-7304b2c6ec5d-profile-collector-cert\") pod \"catalog-operator-68c6474976-qp4hv\" (UID: \"0a95b64f-5e82-446c-8369-7304b2c6ec5d\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-qp4hv" Oct 01 15:05:25 crc kubenswrapper[4869]: I1001 15:05:25.057700 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/58c072ca-9e0a-418d-90ba-ea33213d42c7-profile-collector-cert\") pod \"olm-operator-6b444d44fb-sm2sz\" (UID: \"58c072ca-9e0a-418d-90ba-ea33213d42c7\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-sm2sz" Oct 01 15:05:25 crc kubenswrapper[4869]: I1001 15:05:25.061038 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Oct 01 15:05:25 crc kubenswrapper[4869]: I1001 15:05:25.081003 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Oct 01 15:05:25 crc kubenswrapper[4869]: I1001 15:05:25.100652 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Oct 01 15:05:25 crc kubenswrapper[4869]: I1001 15:05:25.121628 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Oct 01 15:05:25 crc kubenswrapper[4869]: I1001 15:05:25.159079 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Oct 01 15:05:25 crc kubenswrapper[4869]: I1001 15:05:25.167357 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Oct 01 15:05:25 crc kubenswrapper[4869]: I1001 15:05:25.181878 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Oct 01 15:05:25 crc kubenswrapper[4869]: I1001 15:05:25.201469 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Oct 01 15:05:25 crc kubenswrapper[4869]: I1001 15:05:25.222097 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Oct 01 15:05:25 crc kubenswrapper[4869]: I1001 15:05:25.241847 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Oct 01 15:05:25 crc 
kubenswrapper[4869]: I1001 15:05:25.263033 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Oct 01 15:05:25 crc kubenswrapper[4869]: I1001 15:05:25.281330 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Oct 01 15:05:25 crc kubenswrapper[4869]: I1001 15:05:25.289630 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e544db96-88d4-4638-b95a-de0e7c17e2d9-config\") pod \"kube-controller-manager-operator-78b949d7b-sj2cx\" (UID: \"e544db96-88d4-4638-b95a-de0e7c17e2d9\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-sj2cx" Oct 01 15:05:25 crc kubenswrapper[4869]: I1001 15:05:25.321757 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 01 15:05:25 crc kubenswrapper[4869]: I1001 15:05:25.341298 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 01 15:05:25 crc kubenswrapper[4869]: I1001 15:05:25.400418 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6679m\" (UniqueName: \"kubernetes.io/projected/8367e54f-3b20-4903-8dcf-d3ae02e516ca-kube-api-access-6679m\") pod \"controller-manager-879f6c89f-s8l4m\" (UID: \"8367e54f-3b20-4903-8dcf-d3ae02e516ca\") " pod="openshift-controller-manager/controller-manager-879f6c89f-s8l4m" Oct 01 15:05:25 crc kubenswrapper[4869]: I1001 15:05:25.419953 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cj9hx\" (UniqueName: \"kubernetes.io/projected/78536e39-e31c-4666-86e9-4a0c35993e8e-kube-api-access-cj9hx\") pod \"machine-approver-56656f9798-kv7s4\" (UID: \"78536e39-e31c-4666-86e9-4a0c35993e8e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-kv7s4" Oct 01 15:05:25 crc kubenswrapper[4869]: I1001 15:05:25.442682 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r4fd7\" (UniqueName: \"kubernetes.io/projected/2424fe66-6e4a-4dc6-b00e-c5a1b01e3f4a-kube-api-access-r4fd7\") pod \"apiserver-76f77b778f-hxvsn\" (UID: \"2424fe66-6e4a-4dc6-b00e-c5a1b01e3f4a\") " pod="openshift-apiserver/apiserver-76f77b778f-hxvsn" Oct 01 15:05:25 crc kubenswrapper[4869]: I1001 15:05:25.461521 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Oct 01 15:05:25 crc kubenswrapper[4869]: I1001 15:05:25.461863 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6j2vg\" (UniqueName: \"kubernetes.io/projected/2b6c50d9-ffec-4ecb-914e-c683a2d3b9e4-kube-api-access-6j2vg\") pod \"machine-api-operator-5694c8668f-jdrcg\" (UID: \"2b6c50d9-ffec-4ecb-914e-c683a2d3b9e4\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-jdrcg" Oct 01 15:05:25 crc kubenswrapper[4869]: I1001 15:05:25.468847 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-jdrcg" Oct 01 15:05:25 crc kubenswrapper[4869]: I1001 15:05:25.481423 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Oct 01 15:05:25 crc kubenswrapper[4869]: I1001 15:05:25.501766 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Oct 01 15:05:25 crc kubenswrapper[4869]: I1001 15:05:25.521619 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Oct 01 15:05:25 crc kubenswrapper[4869]: I1001 15:05:25.551032 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Oct 01 15:05:25 crc kubenswrapper[4869]: I1001 15:05:25.562395 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Oct 01 15:05:25 crc kubenswrapper[4869]: I1001 15:05:25.580971 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 15:05:25 crc kubenswrapper[4869]: I1001 15:05:25.581733 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 15:05:25 crc kubenswrapper[4869]: I1001 15:05:25.583717 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rz7qp" Oct 01 15:05:25 crc kubenswrapper[4869]: I1001 15:05:25.583856 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-hxvsn" Oct 01 15:05:25 crc kubenswrapper[4869]: I1001 15:05:25.584410 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 15:05:25 crc kubenswrapper[4869]: I1001 15:05:25.592783 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-s8l4m" Oct 01 15:05:25 crc kubenswrapper[4869]: I1001 15:05:25.613659 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9ftj5\" (UniqueName: \"kubernetes.io/projected/5604dc28-0cb8-47af-990f-737c37b7a8e1-kube-api-access-9ftj5\") pod \"cluster-samples-operator-665b6dd947-fx7tg\" (UID: \"5604dc28-0cb8-47af-990f-737c37b7a8e1\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-fx7tg" Oct 01 15:05:25 crc kubenswrapper[4869]: I1001 15:05:25.644409 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pnz9r\" (UniqueName: \"kubernetes.io/projected/8b781c72-08fb-4464-9869-87595882ef0a-kube-api-access-pnz9r\") pod \"authentication-operator-69f744f599-jzt8h\" (UID: \"8b781c72-08fb-4464-9869-87595882ef0a\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-jzt8h" Oct 01 15:05:25 crc kubenswrapper[4869]: I1001 15:05:25.662405 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Oct 01 15:05:25 crc kubenswrapper[4869]: I1001 15:05:25.662558 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8c9zw\" (UniqueName: \"kubernetes.io/projected/e3343b45-e092-46ef-9e1b-6d4d55167c19-kube-api-access-8c9zw\") pod \"apiserver-7bbb656c7d-pdvwx\" (UID: \"e3343b45-e092-46ef-9e1b-6d4d55167c19\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pdvwx" Oct 01 15:05:25 crc kubenswrapper[4869]: I1001 15:05:25.666095 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lds7j\" (UniqueName: \"kubernetes.io/projected/3935bd54-70b2-4d02-941b-94e00b4ec101-kube-api-access-lds7j\") pod \"openshift-apiserver-operator-796bbdcf4f-xjqf4\" (UID: \"3935bd54-70b2-4d02-941b-94e00b4ec101\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-xjqf4" Oct 01 15:05:25 crc kubenswrapper[4869]: I1001 15:05:25.675151 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-kv7s4" Oct 01 15:05:25 crc kubenswrapper[4869]: I1001 15:05:25.683534 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Oct 01 15:05:25 crc kubenswrapper[4869]: I1001 15:05:25.693110 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-jzt8h" Oct 01 15:05:25 crc kubenswrapper[4869]: W1001 15:05:25.698921 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod78536e39_e31c_4666_86e9_4a0c35993e8e.slice/crio-e7b6d7b7dff29533e9c8cfbe574a4fad5e306e753511b5f11ec332fd9fdacdd9 WatchSource:0}: Error finding container e7b6d7b7dff29533e9c8cfbe574a4fad5e306e753511b5f11ec332fd9fdacdd9: Status 404 returned error can't find the container with id e7b6d7b7dff29533e9c8cfbe574a4fad5e306e753511b5f11ec332fd9fdacdd9 Oct 01 15:05:25 crc kubenswrapper[4869]: I1001 15:05:25.701715 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Oct 01 15:05:25 crc kubenswrapper[4869]: I1001 15:05:25.712557 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-jdrcg"] Oct 01 15:05:25 crc kubenswrapper[4869]: I1001 15:05:25.721547 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Oct 01 15:05:25 crc kubenswrapper[4869]: I1001 15:05:25.741542 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Oct 01 15:05:25 crc kubenswrapper[4869]: W1001 15:05:25.747350 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2b6c50d9_ffec_4ecb_914e_c683a2d3b9e4.slice/crio-50f5dfe6e3898a220e4d9a4b1e78083fca03b2641791bd2bba0d4429108311e0 WatchSource:0}: Error finding container 50f5dfe6e3898a220e4d9a4b1e78083fca03b2641791bd2bba0d4429108311e0: Status 404 returned error can't find the container with id 50f5dfe6e3898a220e4d9a4b1e78083fca03b2641791bd2bba0d4429108311e0 Oct 01 15:05:25 crc kubenswrapper[4869]: I1001 15:05:25.761612 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Oct 01 15:05:25 crc kubenswrapper[4869]: I1001 15:05:25.764371 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pdvwx" Oct 01 15:05:25 crc kubenswrapper[4869]: I1001 15:05:25.777763 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-fx7tg" Oct 01 15:05:25 crc kubenswrapper[4869]: I1001 15:05:25.781096 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Oct 01 15:05:25 crc kubenswrapper[4869]: I1001 15:05:25.801776 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Oct 01 15:05:25 crc kubenswrapper[4869]: I1001 15:05:25.824561 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Oct 01 15:05:25 crc kubenswrapper[4869]: I1001 15:05:25.825494 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-hxvsn"] Oct 01 15:05:25 crc kubenswrapper[4869]: I1001 15:05:25.841491 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Oct 01 15:05:25 crc kubenswrapper[4869]: I1001 15:05:25.852116 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-xjqf4" Oct 01 15:05:25 crc kubenswrapper[4869]: I1001 15:05:25.859496 4869 request.go:700] Waited for 1.014892581s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-etcd-operator/configmaps?fieldSelector=metadata.name%3Detcd-service-ca-bundle&limit=500&resourceVersion=0 Oct 01 15:05:25 crc kubenswrapper[4869]: I1001 15:05:25.861153 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Oct 01 15:05:25 crc kubenswrapper[4869]: I1001 15:05:25.878903 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-s8l4m"] Oct 01 15:05:25 crc kubenswrapper[4869]: I1001 15:05:25.879008 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-kv7s4" event={"ID":"78536e39-e31c-4666-86e9-4a0c35993e8e","Type":"ContainerStarted","Data":"e7b6d7b7dff29533e9c8cfbe574a4fad5e306e753511b5f11ec332fd9fdacdd9"} Oct 01 15:05:25 crc kubenswrapper[4869]: I1001 15:05:25.879913 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-jdrcg" event={"ID":"2b6c50d9-ffec-4ecb-914e-c683a2d3b9e4","Type":"ContainerStarted","Data":"50f5dfe6e3898a220e4d9a4b1e78083fca03b2641791bd2bba0d4429108311e0"} Oct 01 15:05:25 crc kubenswrapper[4869]: I1001 15:05:25.880960 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Oct 01 15:05:25 crc kubenswrapper[4869]: W1001 15:05:25.884085 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2424fe66_6e4a_4dc6_b00e_c5a1b01e3f4a.slice/crio-443a5bdb08ed5b08bbe566231457f95cc7b84fc26db99f3ac55bb5c5d119aae4 WatchSource:0}: Error finding container 443a5bdb08ed5b08bbe566231457f95cc7b84fc26db99f3ac55bb5c5d119aae4: Status 404 returned error can't find the container with id 443a5bdb08ed5b08bbe566231457f95cc7b84fc26db99f3ac55bb5c5d119aae4 Oct 01 15:05:25 crc kubenswrapper[4869]: I1001 15:05:25.905723 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Oct 
01 15:05:25 crc kubenswrapper[4869]: I1001 15:05:25.926108 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-jzt8h"] Oct 01 15:05:25 crc kubenswrapper[4869]: I1001 15:05:25.929906 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Oct 01 15:05:25 crc kubenswrapper[4869]: I1001 15:05:25.940719 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Oct 01 15:05:25 crc kubenswrapper[4869]: W1001 15:05:25.944549 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8b781c72_08fb_4464_9869_87595882ef0a.slice/crio-76776cc1b8f68c1c08e25ab5d80daff1afe84b7283f35b5587f5c022398207f4 WatchSource:0}: Error finding container 76776cc1b8f68c1c08e25ab5d80daff1afe84b7283f35b5587f5c022398207f4: Status 404 returned error can't find the container with id 76776cc1b8f68c1c08e25ab5d80daff1afe84b7283f35b5587f5c022398207f4 Oct 01 15:05:25 crc kubenswrapper[4869]: I1001 15:05:25.962987 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Oct 01 15:05:25 crc kubenswrapper[4869]: I1001 15:05:25.971630 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-pdvwx"] Oct 01 15:05:25 crc kubenswrapper[4869]: I1001 15:05:25.980907 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Oct 01 15:05:25 crc kubenswrapper[4869]: W1001 15:05:25.985954 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode3343b45_e092_46ef_9e1b_6d4d55167c19.slice/crio-c80968e46bd383769616e670c34a1ae9eef84d9cbf9a683d5c4e1c7d95745c42 WatchSource:0}: Error finding container c80968e46bd383769616e670c34a1ae9eef84d9cbf9a683d5c4e1c7d95745c42: Status 404 returned error can't find the container with id c80968e46bd383769616e670c34a1ae9eef84d9cbf9a683d5c4e1c7d95745c42 Oct 01 15:05:26 crc kubenswrapper[4869]: I1001 15:05:26.000873 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Oct 01 15:05:26 crc kubenswrapper[4869]: I1001 15:05:26.017452 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-fx7tg"] Oct 01 15:05:26 crc kubenswrapper[4869]: I1001 15:05:26.020889 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Oct 01 15:05:26 crc kubenswrapper[4869]: I1001 15:05:26.043826 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Oct 01 15:05:26 crc kubenswrapper[4869]: I1001 15:05:26.061152 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Oct 01 15:05:26 crc kubenswrapper[4869]: I1001 15:05:26.078879 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-xjqf4"] Oct 01 15:05:26 crc kubenswrapper[4869]: I1001 15:05:26.081649 4869 
reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Oct 01 15:05:26 crc kubenswrapper[4869]: I1001 15:05:26.100718 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Oct 01 15:05:26 crc kubenswrapper[4869]: I1001 15:05:26.121612 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Oct 01 15:05:26 crc kubenswrapper[4869]: I1001 15:05:26.140503 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Oct 01 15:05:26 crc kubenswrapper[4869]: I1001 15:05:26.160436 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Oct 01 15:05:26 crc kubenswrapper[4869]: I1001 15:05:26.181058 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Oct 01 15:05:26 crc kubenswrapper[4869]: I1001 15:05:26.201619 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Oct 01 15:05:26 crc kubenswrapper[4869]: I1001 15:05:26.222030 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Oct 01 15:05:26 crc kubenswrapper[4869]: I1001 15:05:26.241248 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Oct 01 15:05:26 crc kubenswrapper[4869]: I1001 15:05:26.271597 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Oct 01 15:05:26 crc kubenswrapper[4869]: I1001 15:05:26.281728 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Oct 01 15:05:26 crc kubenswrapper[4869]: I1001 15:05:26.300833 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Oct 01 15:05:26 crc kubenswrapper[4869]: I1001 15:05:26.321520 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Oct 01 15:05:26 crc kubenswrapper[4869]: I1001 15:05:26.341107 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Oct 01 15:05:26 crc kubenswrapper[4869]: I1001 15:05:26.361243 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Oct 01 15:05:26 crc kubenswrapper[4869]: I1001 15:05:26.381042 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Oct 01 15:05:26 crc kubenswrapper[4869]: I1001 15:05:26.401678 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Oct 01 15:05:26 crc kubenswrapper[4869]: I1001 15:05:26.421019 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Oct 01 15:05:26 crc kubenswrapper[4869]: I1001 15:05:26.441974 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Oct 01 15:05:26 crc kubenswrapper[4869]: I1001 15:05:26.462871 4869 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-service-ca-operator"/"serving-cert" Oct 01 15:05:26 crc kubenswrapper[4869]: I1001 15:05:26.481515 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Oct 01 15:05:26 crc kubenswrapper[4869]: I1001 15:05:26.501350 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Oct 01 15:05:26 crc kubenswrapper[4869]: I1001 15:05:26.521636 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Oct 01 15:05:26 crc kubenswrapper[4869]: I1001 15:05:26.542715 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Oct 01 15:05:26 crc kubenswrapper[4869]: I1001 15:05:26.561563 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Oct 01 15:05:26 crc kubenswrapper[4869]: I1001 15:05:26.581851 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Oct 01 15:05:26 crc kubenswrapper[4869]: I1001 15:05:26.601105 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Oct 01 15:05:26 crc kubenswrapper[4869]: I1001 15:05:26.621610 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Oct 01 15:05:26 crc kubenswrapper[4869]: I1001 15:05:26.642083 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Oct 01 15:05:26 crc kubenswrapper[4869]: I1001 15:05:26.662164 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Oct 01 15:05:26 crc kubenswrapper[4869]: I1001 15:05:26.682068 4869 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Oct 01 15:05:26 crc kubenswrapper[4869]: I1001 15:05:26.701074 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Oct 01 15:05:26 crc kubenswrapper[4869]: I1001 15:05:26.722620 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Oct 01 15:05:26 crc kubenswrapper[4869]: I1001 15:05:26.778643 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Oct 01 15:05:26 crc kubenswrapper[4869]: I1001 15:05:26.778743 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Oct 01 15:05:26 crc kubenswrapper[4869]: I1001 15:05:26.782067 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Oct 01 15:05:26 crc kubenswrapper[4869]: I1001 15:05:26.835071 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e544db96-88d4-4638-b95a-de0e7c17e2d9-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-sj2cx\" (UID: \"e544db96-88d4-4638-b95a-de0e7c17e2d9\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-sj2cx" Oct 01 15:05:26 crc kubenswrapper[4869]: I1001 15:05:26.841069 4869 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"kube-api-access-jg8jf\" (UniqueName: \"kubernetes.io/projected/2ec0b99f-cb20-410b-8142-a3d046ed6578-kube-api-access-jg8jf\") pod \"control-plane-machine-set-operator-78cbb6b69f-vkxf8\" (UID: \"2ec0b99f-cb20-410b-8142-a3d046ed6578\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-vkxf8" Oct 01 15:05:26 crc kubenswrapper[4869]: I1001 15:05:26.859388 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8ffaf4d1-4b33-48a0-ae56-4b116924f58a-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-j7g7t\" (UID: \"8ffaf4d1-4b33-48a0-ae56-4b116924f58a\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-j7g7t" Oct 01 15:05:26 crc kubenswrapper[4869]: I1001 15:05:26.880081 4869 request.go:700] Waited for 1.980385873s due to client-side throttling, not priority and fairness, request: POST:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-operator-lifecycle-manager/serviceaccounts/olm-operator-serviceaccount/token Oct 01 15:05:26 crc kubenswrapper[4869]: I1001 15:05:26.887593 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-jdrcg" event={"ID":"2b6c50d9-ffec-4ecb-914e-c683a2d3b9e4","Type":"ContainerStarted","Data":"305e617c4b835d03abaea5128aeba25d8a4caab3e797cd30f184b82f584db6ec"} Oct 01 15:05:26 crc kubenswrapper[4869]: I1001 15:05:26.887667 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-jdrcg" event={"ID":"2b6c50d9-ffec-4ecb-914e-c683a2d3b9e4","Type":"ContainerStarted","Data":"cc105ad6db063aef471d8293a7c0f56a664a4388805cf6756e0570651c538fe6"} Oct 01 15:05:26 crc kubenswrapper[4869]: I1001 15:05:26.889480 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-s8l4m" event={"ID":"8367e54f-3b20-4903-8dcf-d3ae02e516ca","Type":"ContainerStarted","Data":"edf3d3805b9c8aa57b9b4995817df24bc06cefbe5fda9f79542d1715fb7612fb"} Oct 01 15:05:26 crc kubenswrapper[4869]: I1001 15:05:26.889522 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-s8l4m" event={"ID":"8367e54f-3b20-4903-8dcf-d3ae02e516ca","Type":"ContainerStarted","Data":"3c69d7e8347bffdee5fa5d619939f5a7774e1c535edc50326919f3567698681b"} Oct 01 15:05:26 crc kubenswrapper[4869]: I1001 15:05:26.889966 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-s8l4m" Oct 01 15:05:26 crc kubenswrapper[4869]: I1001 15:05:26.898818 4869 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-s8l4m container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.7:8443/healthz\": dial tcp 10.217.0.7:8443: connect: connection refused" start-of-body= Oct 01 15:05:26 crc kubenswrapper[4869]: I1001 15:05:26.898890 4869 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-s8l4m" podUID="8367e54f-3b20-4903-8dcf-d3ae02e516ca" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.7:8443/healthz\": dial tcp 10.217.0.7:8443: connect: connection refused" Oct 01 15:05:26 crc kubenswrapper[4869]: I1001 15:05:26.898910 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-xjqf4" event={"ID":"3935bd54-70b2-4d02-941b-94e00b4ec101","Type":"ContainerStarted","Data":"323bab5d519ebd78d06041c75e754e9d483aa1cb7590f6f8cf79a3b443d0cdbf"} Oct 01 15:05:26 crc kubenswrapper[4869]: I1001 15:05:26.898941 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-xjqf4" event={"ID":"3935bd54-70b2-4d02-941b-94e00b4ec101","Type":"ContainerStarted","Data":"7ef314cfcef182f0f92cf7c5e7e2c827dd7d4cb504b165ca609a61e289f8566f"} Oct 01 15:05:26 crc kubenswrapper[4869]: I1001 15:05:26.900939 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-fx7tg" event={"ID":"5604dc28-0cb8-47af-990f-737c37b7a8e1","Type":"ContainerStarted","Data":"198d4f126f005cc298cca86bb46f3ebf90db0e3ee0cb96820e689e0d3e3eb43f"} Oct 01 15:05:26 crc kubenswrapper[4869]: I1001 15:05:26.900971 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-fx7tg" event={"ID":"5604dc28-0cb8-47af-990f-737c37b7a8e1","Type":"ContainerStarted","Data":"0c1244479187d58ddf1c3a2abd2f38080320c3896748797c544f2ab255557f91"} Oct 01 15:05:26 crc kubenswrapper[4869]: I1001 15:05:26.900984 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-fx7tg" event={"ID":"5604dc28-0cb8-47af-990f-737c37b7a8e1","Type":"ContainerStarted","Data":"d56ffb327ac20b8e596eb0e7fd3eaede0adcc7b7d751260f2ac686a926ad4546"} Oct 01 15:05:26 crc kubenswrapper[4869]: I1001 15:05:26.901665 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rc5nb\" (UniqueName: \"kubernetes.io/projected/3635d59c-a52d-465d-8402-80153cd7369b-kube-api-access-rc5nb\") pod \"route-controller-manager-6576b87f9c-5zhxq\" (UID: \"3635d59c-a52d-465d-8402-80153cd7369b\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5zhxq" Oct 01 15:05:26 crc kubenswrapper[4869]: I1001 15:05:26.904088 4869 generic.go:334] "Generic (PLEG): container finished" podID="e3343b45-e092-46ef-9e1b-6d4d55167c19" containerID="ed3a136e192d991fb6d6522bdd5adea5afda1acf91621b828ef831880674e4e9" exitCode=0 Oct 01 15:05:26 crc kubenswrapper[4869]: I1001 15:05:26.904155 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pdvwx" event={"ID":"e3343b45-e092-46ef-9e1b-6d4d55167c19","Type":"ContainerDied","Data":"ed3a136e192d991fb6d6522bdd5adea5afda1acf91621b828ef831880674e4e9"} Oct 01 15:05:26 crc kubenswrapper[4869]: I1001 15:05:26.904383 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pdvwx" event={"ID":"e3343b45-e092-46ef-9e1b-6d4d55167c19","Type":"ContainerStarted","Data":"c80968e46bd383769616e670c34a1ae9eef84d9cbf9a683d5c4e1c7d95745c42"} Oct 01 15:05:26 crc kubenswrapper[4869]: I1001 15:05:26.906929 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v428h\" (UniqueName: \"kubernetes.io/projected/0a95b64f-5e82-446c-8369-7304b2c6ec5d-kube-api-access-v428h\") pod \"catalog-operator-68c6474976-qp4hv\" (UID: \"0a95b64f-5e82-446c-8369-7304b2c6ec5d\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-qp4hv" Oct 01 15:05:26 crc kubenswrapper[4869]: I1001 15:05:26.908312 4869 generic.go:334] "Generic (PLEG): container finished" 
podID="2424fe66-6e4a-4dc6-b00e-c5a1b01e3f4a" containerID="7c87e1da6bd93ab0e1264d34057ba567638ec069cc61f312ac49eba7262aa295" exitCode=0 Oct 01 15:05:26 crc kubenswrapper[4869]: I1001 15:05:26.908449 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-hxvsn" event={"ID":"2424fe66-6e4a-4dc6-b00e-c5a1b01e3f4a","Type":"ContainerDied","Data":"7c87e1da6bd93ab0e1264d34057ba567638ec069cc61f312ac49eba7262aa295"} Oct 01 15:05:26 crc kubenswrapper[4869]: I1001 15:05:26.908479 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-hxvsn" event={"ID":"2424fe66-6e4a-4dc6-b00e-c5a1b01e3f4a","Type":"ContainerStarted","Data":"443a5bdb08ed5b08bbe566231457f95cc7b84fc26db99f3ac55bb5c5d119aae4"} Oct 01 15:05:26 crc kubenswrapper[4869]: I1001 15:05:26.918442 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-jzt8h" event={"ID":"8b781c72-08fb-4464-9869-87595882ef0a","Type":"ContainerStarted","Data":"6aa5c8350e8ea960938281b9d3efa2c8ea57df2a4d045f79f1b06379fdd6adf7"} Oct 01 15:05:26 crc kubenswrapper[4869]: I1001 15:05:26.918487 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-jzt8h" event={"ID":"8b781c72-08fb-4464-9869-87595882ef0a","Type":"ContainerStarted","Data":"76776cc1b8f68c1c08e25ab5d80daff1afe84b7283f35b5587f5c022398207f4"} Oct 01 15:05:26 crc kubenswrapper[4869]: I1001 15:05:26.921387 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xs9sm\" (UniqueName: \"kubernetes.io/projected/58c072ca-9e0a-418d-90ba-ea33213d42c7-kube-api-access-xs9sm\") pod \"olm-operator-6b444d44fb-sm2sz\" (UID: \"58c072ca-9e0a-418d-90ba-ea33213d42c7\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-sm2sz" Oct 01 15:05:26 crc kubenswrapper[4869]: I1001 15:05:26.931417 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-kv7s4" event={"ID":"78536e39-e31c-4666-86e9-4a0c35993e8e","Type":"ContainerStarted","Data":"b2ee12d3ae96d552730fafb22199a0671fdc701b6ad4bb483082ce6245307d97"} Oct 01 15:05:26 crc kubenswrapper[4869]: I1001 15:05:26.931480 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-kv7s4" event={"ID":"78536e39-e31c-4666-86e9-4a0c35993e8e","Type":"ContainerStarted","Data":"6fc8c8af14ac6589224881331571076c5eca4bfccf8a10fe7bbc0f829b0ab894"} Oct 01 15:05:26 crc kubenswrapper[4869]: I1001 15:05:26.951388 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lfktn\" (UniqueName: \"kubernetes.io/projected/3157872f-8582-4335-af4d-18e4ab38d91c-kube-api-access-lfktn\") pod \"console-operator-58897d9998-vg26t\" (UID: \"3157872f-8582-4335-af4d-18e4ab38d91c\") " pod="openshift-console-operator/console-operator-58897d9998-vg26t" Oct 01 15:05:26 crc kubenswrapper[4869]: I1001 15:05:26.962698 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mqbcm\" (UniqueName: \"kubernetes.io/projected/2d8f8fae-d727-4763-bb02-0a74320ba8c4-kube-api-access-mqbcm\") pod \"downloads-7954f5f757-dxcl2\" (UID: \"2d8f8fae-d727-4763-bb02-0a74320ba8c4\") " pod="openshift-console/downloads-7954f5f757-dxcl2" Oct 01 15:05:26 crc kubenswrapper[4869]: I1001 15:05:26.980980 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/downloads-7954f5f757-dxcl2" Oct 01 15:05:26 crc kubenswrapper[4869]: I1001 15:05:26.982077 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-sysctl-allowlist" Oct 01 15:05:26 crc kubenswrapper[4869]: I1001 15:05:26.982339 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ql7f8\" (UniqueName: \"kubernetes.io/projected/8ffaf4d1-4b33-48a0-ae56-4b116924f58a-kube-api-access-ql7f8\") pod \"cluster-image-registry-operator-dc59b4c8b-j7g7t\" (UID: \"8ffaf4d1-4b33-48a0-ae56-4b116924f58a\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-j7g7t" Oct 01 15:05:26 crc kubenswrapper[4869]: I1001 15:05:26.987701 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5zhxq" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.001700 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.022515 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-vg26t" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.026605 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.037801 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-j7g7t" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.041564 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.054817 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-sj2cx" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.062243 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.062250 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-sm2sz" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.076597 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-vkxf8" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.092131 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-qp4hv" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.124611 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.137206 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4zhjs\" (UID: \"79b5f958-f252-4703-b785-05b0d01a6e72\") " pod="openshift-image-registry/image-registry-697d97f7c8-4zhjs" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.137270 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kpwj8\" (UniqueName: \"kubernetes.io/projected/2f6f6869-bd98-4c21-a184-a3b7bd5f0b5b-kube-api-access-kpwj8\") pod \"openshift-config-operator-7777fb866f-gqrbk\" (UID: \"2f6f6869-bd98-4c21-a184-a3b7bd5f0b5b\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-gqrbk" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.137295 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/4a5e3470-0f08-44e6-8d4c-f968950c75f2-metrics-tls\") pod \"dns-operator-744455d44c-87546\" (UID: \"4a5e3470-0f08-44e6-8d4c-f968950c75f2\") " pod="openshift-dns-operator/dns-operator-744455d44c-87546" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.137315 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-8mczm\" (UID: \"6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a\") " pod="openshift-authentication/oauth-openshift-558db77b4-8mczm" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.137356 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/4636576a-d3da-4491-a146-a6ffe6382a06-console-serving-cert\") pod \"console-f9d7485db-jv4xs\" (UID: \"4636576a-d3da-4491-a146-a6ffe6382a06\") " pod="openshift-console/console-f9d7485db-jv4xs" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.137406 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-8mczm\" (UID: \"6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a\") " pod="openshift-authentication/oauth-openshift-558db77b4-8mczm" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.137422 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/4636576a-d3da-4491-a146-a6ffe6382a06-console-oauth-config\") pod \"console-f9d7485db-jv4xs\" (UID: \"4636576a-d3da-4491-a146-a6ffe6382a06\") " pod="openshift-console/console-f9d7485db-jv4xs" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.137438 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"registry-tls\" (UniqueName: \"kubernetes.io/projected/79b5f958-f252-4703-b785-05b0d01a6e72-registry-tls\") pod \"image-registry-697d97f7c8-4zhjs\" (UID: \"79b5f958-f252-4703-b785-05b0d01a6e72\") " pod="openshift-image-registry/image-registry-697d97f7c8-4zhjs" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.137457 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-8mczm\" (UID: \"6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a\") " pod="openshift-authentication/oauth-openshift-558db77b4-8mczm" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.137491 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/4636576a-d3da-4491-a146-a6ffe6382a06-service-ca\") pod \"console-f9d7485db-jv4xs\" (UID: \"4636576a-d3da-4491-a146-a6ffe6382a06\") " pod="openshift-console/console-f9d7485db-jv4xs" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.137519 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4636576a-d3da-4491-a146-a6ffe6382a06-trusted-ca-bundle\") pod \"console-f9d7485db-jv4xs\" (UID: \"4636576a-d3da-4491-a146-a6ffe6382a06\") " pod="openshift-console/console-f9d7485db-jv4xs" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.137539 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-8mczm\" (UID: \"6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a\") " pod="openshift-authentication/oauth-openshift-558db77b4-8mczm" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.137556 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9a717f28-31d3-4239-a315-97236362d5cb-config\") pod \"kube-apiserver-operator-766d6c64bb-l5vnp\" (UID: \"9a717f28-31d3-4239-a315-97236362d5cb\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-l5vnp" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.137602 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-8mczm\" (UID: \"6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a\") " pod="openshift-authentication/oauth-openshift-558db77b4-8mczm" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.137620 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-8mczm\" (UID: \"6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a\") " pod="openshift-authentication/oauth-openshift-558db77b4-8mczm" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.137637 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jn46p\" (UniqueName: 
\"kubernetes.io/projected/6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a-kube-api-access-jn46p\") pod \"oauth-openshift-558db77b4-8mczm\" (UID: \"6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a\") " pod="openshift-authentication/oauth-openshift-558db77b4-8mczm" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.137654 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9a717f28-31d3-4239-a315-97236362d5cb-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-l5vnp\" (UID: \"9a717f28-31d3-4239-a315-97236362d5cb\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-l5vnp" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.137676 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/79b5f958-f252-4703-b785-05b0d01a6e72-trusted-ca\") pod \"image-registry-697d97f7c8-4zhjs\" (UID: \"79b5f958-f252-4703-b785-05b0d01a6e72\") " pod="openshift-image-registry/image-registry-697d97f7c8-4zhjs" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.137719 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-8mczm\" (UID: \"6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a\") " pod="openshift-authentication/oauth-openshift-558db77b4-8mczm" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.137771 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/79b5f958-f252-4703-b785-05b0d01a6e72-installation-pull-secrets\") pod \"image-registry-697d97f7c8-4zhjs\" (UID: \"79b5f958-f252-4703-b785-05b0d01a6e72\") " pod="openshift-image-registry/image-registry-697d97f7c8-4zhjs" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.137791 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f4295e29-ac36-4236-b679-4cd87ea76347-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-jds44\" (UID: \"f4295e29-ac36-4236-b679-4cd87ea76347\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-jds44" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.137810 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a-audit-policies\") pod \"oauth-openshift-558db77b4-8mczm\" (UID: \"6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a\") " pod="openshift-authentication/oauth-openshift-558db77b4-8mczm" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.137830 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-8mczm\" (UID: \"6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a\") " pod="openshift-authentication/oauth-openshift-558db77b4-8mczm" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.137850 4869 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/79b5f958-f252-4703-b785-05b0d01a6e72-ca-trust-extracted\") pod \"image-registry-697d97f7c8-4zhjs\" (UID: \"79b5f958-f252-4703-b785-05b0d01a6e72\") " pod="openshift-image-registry/image-registry-697d97f7c8-4zhjs" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.137901 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/4636576a-d3da-4491-a146-a6ffe6382a06-oauth-serving-cert\") pod \"console-f9d7485db-jv4xs\" (UID: \"4636576a-d3da-4491-a146-a6ffe6382a06\") " pod="openshift-console/console-f9d7485db-jv4xs" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.137936 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/79b5f958-f252-4703-b785-05b0d01a6e72-bound-sa-token\") pod \"image-registry-697d97f7c8-4zhjs\" (UID: \"79b5f958-f252-4703-b785-05b0d01a6e72\") " pod="openshift-image-registry/image-registry-697d97f7c8-4zhjs" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.137956 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-8mczm\" (UID: \"6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a\") " pod="openshift-authentication/oauth-openshift-558db77b4-8mczm" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.137974 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cvl5m\" (UniqueName: \"kubernetes.io/projected/4636576a-d3da-4491-a146-a6ffe6382a06-kube-api-access-cvl5m\") pod \"console-f9d7485db-jv4xs\" (UID: \"4636576a-d3da-4491-a146-a6ffe6382a06\") " pod="openshift-console/console-f9d7485db-jv4xs" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.137993 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/4636576a-d3da-4491-a146-a6ffe6382a06-console-config\") pod \"console-f9d7485db-jv4xs\" (UID: \"4636576a-d3da-4491-a146-a6ffe6382a06\") " pod="openshift-console/console-f9d7485db-jv4xs" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.138008 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f4295e29-ac36-4236-b679-4cd87ea76347-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-jds44\" (UID: \"f4295e29-ac36-4236-b679-4cd87ea76347\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-jds44" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.138023 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9a717f28-31d3-4239-a315-97236362d5cb-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-l5vnp\" (UID: \"9a717f28-31d3-4239-a315-97236362d5cb\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-l5vnp" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.138078 4869 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/79b5f958-f252-4703-b785-05b0d01a6e72-registry-certificates\") pod \"image-registry-697d97f7c8-4zhjs\" (UID: \"79b5f958-f252-4703-b785-05b0d01a6e72\") " pod="openshift-image-registry/image-registry-697d97f7c8-4zhjs" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.138110 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a-audit-dir\") pod \"oauth-openshift-558db77b4-8mczm\" (UID: \"6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a\") " pod="openshift-authentication/oauth-openshift-558db77b4-8mczm" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.138127 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2f6f6869-bd98-4c21-a184-a3b7bd5f0b5b-serving-cert\") pod \"openshift-config-operator-7777fb866f-gqrbk\" (UID: \"2f6f6869-bd98-4c21-a184-a3b7bd5f0b5b\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-gqrbk" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.138142 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mv4fq\" (UniqueName: \"kubernetes.io/projected/f4295e29-ac36-4236-b679-4cd87ea76347-kube-api-access-mv4fq\") pod \"openshift-controller-manager-operator-756b6f6bc6-jds44\" (UID: \"f4295e29-ac36-4236-b679-4cd87ea76347\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-jds44" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.138162 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/bd217fac-bac4-44af-8942-0385d47f21d2-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-tgtxf\" (UID: \"bd217fac-bac4-44af-8942-0385d47f21d2\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-tgtxf" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.138180 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-8mczm\" (UID: \"6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a\") " pod="openshift-authentication/oauth-openshift-558db77b4-8mczm" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.138207 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-8mczm\" (UID: \"6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a\") " pod="openshift-authentication/oauth-openshift-558db77b4-8mczm" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.138225 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lqgjg\" (UniqueName: \"kubernetes.io/projected/bd217fac-bac4-44af-8942-0385d47f21d2-kube-api-access-lqgjg\") pod \"multus-admission-controller-857f4d67dd-tgtxf\" (UID: \"bd217fac-bac4-44af-8942-0385d47f21d2\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-tgtxf" Oct 01 15:05:27 
crc kubenswrapper[4869]: I1001 15:05:27.138240 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f5ldd\" (UniqueName: \"kubernetes.io/projected/79b5f958-f252-4703-b785-05b0d01a6e72-kube-api-access-f5ldd\") pod \"image-registry-697d97f7c8-4zhjs\" (UID: \"79b5f958-f252-4703-b785-05b0d01a6e72\") " pod="openshift-image-registry/image-registry-697d97f7c8-4zhjs" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.138350 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-74hd6\" (UniqueName: \"kubernetes.io/projected/4a5e3470-0f08-44e6-8d4c-f968950c75f2-kube-api-access-74hd6\") pod \"dns-operator-744455d44c-87546\" (UID: \"4a5e3470-0f08-44e6-8d4c-f968950c75f2\") " pod="openshift-dns-operator/dns-operator-744455d44c-87546" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.138368 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/2f6f6869-bd98-4c21-a184-a3b7bd5f0b5b-available-featuregates\") pod \"openshift-config-operator-7777fb866f-gqrbk\" (UID: \"2f6f6869-bd98-4c21-a184-a3b7bd5f0b5b\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-gqrbk" Oct 01 15:05:27 crc kubenswrapper[4869]: E1001 15:05:27.138788 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 15:05:27.638772599 +0000 UTC m=+36.785615715 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4zhjs" (UID: "79b5f958-f252-4703-b785-05b0d01a6e72") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.153743 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.166693 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.182288 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.208357 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.221875 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.239919 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.240170 4869 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-8mczm\" (UID: \"6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a\") " pod="openshift-authentication/oauth-openshift-558db77b4-8mczm" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.240213 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/9338ac98-e76c-4c74-af46-b646ab637e37-signing-cabundle\") pod \"service-ca-9c57cc56f-qd5ft\" (UID: \"9338ac98-e76c-4c74-af46-b646ab637e37\") " pod="openshift-service-ca/service-ca-9c57cc56f-qd5ft" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.240237 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/60f75bf5-448e-4770-8eba-26c271028f50-metrics-tls\") pod \"dns-default-nk6rv\" (UID: \"60f75bf5-448e-4770-8eba-26c271028f50\") " pod="openshift-dns/dns-default-nk6rv" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.240324 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nntzm\" (UniqueName: \"kubernetes.io/projected/5067e051-5c99-4fe9-b4f2-e5dfa8b1c9b4-kube-api-access-nntzm\") pod \"migrator-59844c95c7-zb5c7\" (UID: \"5067e051-5c99-4fe9-b4f2-e5dfa8b1c9b4\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-zb5c7" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.240347 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w7hq4\" (UniqueName: \"kubernetes.io/projected/de7935aa-128a-418f-bf83-2b9aabc146ed-kube-api-access-w7hq4\") pod \"package-server-manager-789f6589d5-hjpxg\" (UID: \"de7935aa-128a-418f-bf83-2b9aabc146ed\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-hjpxg" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.240383 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4ac58a76-114b-4e86-924c-ad1f269b36a8-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-klj2b\" (UID: \"4ac58a76-114b-4e86-924c-ad1f269b36a8\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-klj2b" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.240408 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/79b5f958-f252-4703-b785-05b0d01a6e72-registry-tls\") pod \"image-registry-697d97f7c8-4zhjs\" (UID: \"79b5f958-f252-4703-b785-05b0d01a6e72\") " pod="openshift-image-registry/image-registry-697d97f7c8-4zhjs" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.240431 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dcf200aa-ed03-4e96-a638-568a891b276f-config\") pod \"etcd-operator-b45778765-vjs4q\" (UID: \"dcf200aa-ed03-4e96-a638-568a891b276f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-vjs4q" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.240469 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.240496 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/4636576a-d3da-4491-a146-a6ffe6382a06-service-ca\") pod \"console-f9d7485db-jv4xs\" (UID: \"4636576a-d3da-4491-a146-a6ffe6382a06\") " pod="openshift-console/console-f9d7485db-jv4xs" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.240541 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/a4dfc6a1-b623-46cb-b1ff-c6b809e8deaa-stats-auth\") pod \"router-default-5444994796-mdc2f\" (UID: \"a4dfc6a1-b623-46cb-b1ff-c6b809e8deaa\") " pod="openshift-ingress/router-default-5444994796-mdc2f" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.240626 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/45ac9df8-4d47-44e7-a1bb-bd4d0b8fd560-registration-dir\") pod \"csi-hostpathplugin-9pwb5\" (UID: \"45ac9df8-4d47-44e7-a1bb-bd4d0b8fd560\") " pod="hostpath-provisioner/csi-hostpathplugin-9pwb5" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.240645 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/d8bb34c7-d9d3-466b-979a-f81019e3cc15-node-bootstrap-token\") pod \"machine-config-server-w5ngh\" (UID: \"d8bb34c7-d9d3-466b-979a-f81019e3cc15\") " pod="openshift-machine-config-operator/machine-config-server-w5ngh" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.240665 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4jrmc\" (UniqueName: \"kubernetes.io/projected/a4dfc6a1-b623-46cb-b1ff-c6b809e8deaa-kube-api-access-4jrmc\") pod \"router-default-5444994796-mdc2f\" (UID: \"a4dfc6a1-b623-46cb-b1ff-c6b809e8deaa\") " pod="openshift-ingress/router-default-5444994796-mdc2f" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.240704 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jn46p\" (UniqueName: \"kubernetes.io/projected/6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a-kube-api-access-jn46p\") pod \"oauth-openshift-558db77b4-8mczm\" (UID: \"6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a\") " pod="openshift-authentication/oauth-openshift-558db77b4-8mczm" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.240724 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9a717f28-31d3-4239-a315-97236362d5cb-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-l5vnp\" (UID: \"9a717f28-31d3-4239-a315-97236362d5cb\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-l5vnp" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.240741 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4c1dcd57-cee2-45ab-9a92-8cdd8b864f98-config-volume\") pod 
\"collect-profiles-29322180-8fxq6\" (UID: \"4c1dcd57-cee2-45ab-9a92-8cdd8b864f98\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322180-8fxq6" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.240796 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/2d195bae-5172-4387-869b-086f215963ff-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-t5kll\" (UID: \"2d195bae-5172-4387-869b-086f215963ff\") " pod="openshift-marketplace/marketplace-operator-79b997595-t5kll" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.240822 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-8mczm\" (UID: \"6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a\") " pod="openshift-authentication/oauth-openshift-558db77b4-8mczm" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.240861 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4c1dcd57-cee2-45ab-9a92-8cdd8b864f98-secret-volume\") pod \"collect-profiles-29322180-8fxq6\" (UID: \"4c1dcd57-cee2-45ab-9a92-8cdd8b864f98\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322180-8fxq6" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.240882 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/62ecb2b0-55b3-4344-8237-705ef292ef63-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-xpzff\" (UID: \"62ecb2b0-55b3-4344-8237-705ef292ef63\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-xpzff" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.240904 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/79b5f958-f252-4703-b785-05b0d01a6e72-installation-pull-secrets\") pod \"image-registry-697d97f7c8-4zhjs\" (UID: \"79b5f958-f252-4703-b785-05b0d01a6e72\") " pod="openshift-image-registry/image-registry-697d97f7c8-4zhjs" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.240944 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fa4dadea-9e0e-431f-b6a7-057e5cec4f9f-config\") pod \"service-ca-operator-777779d784-jlnv5\" (UID: \"fa4dadea-9e0e-431f-b6a7-057e5cec4f9f\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-jlnv5" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.240982 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/45ac9df8-4d47-44e7-a1bb-bd4d0b8fd560-plugins-dir\") pod \"csi-hostpathplugin-9pwb5\" (UID: \"45ac9df8-4d47-44e7-a1bb-bd4d0b8fd560\") " pod="hostpath-provisioner/csi-hostpathplugin-9pwb5" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.240999 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fa4dadea-9e0e-431f-b6a7-057e5cec4f9f-serving-cert\") pod \"service-ca-operator-777779d784-jlnv5\" (UID: 
\"fa4dadea-9e0e-431f-b6a7-057e5cec4f9f\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-jlnv5" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.241040 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zbhz5\" (UniqueName: \"kubernetes.io/projected/9338ac98-e76c-4c74-af46-b646ab637e37-kube-api-access-zbhz5\") pod \"service-ca-9c57cc56f-qd5ft\" (UID: \"9338ac98-e76c-4c74-af46-b646ab637e37\") " pod="openshift-service-ca/service-ca-9c57cc56f-qd5ft" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.241061 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/4636576a-d3da-4491-a146-a6ffe6382a06-oauth-serving-cert\") pod \"console-f9d7485db-jv4xs\" (UID: \"4636576a-d3da-4491-a146-a6ffe6382a06\") " pod="openshift-console/console-f9d7485db-jv4xs" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.241079 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4ac58a76-114b-4e86-924c-ad1f269b36a8-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-klj2b\" (UID: \"4ac58a76-114b-4e86-924c-ad1f269b36a8\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-klj2b" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.241132 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/79b5f958-f252-4703-b785-05b0d01a6e72-bound-sa-token\") pod \"image-registry-697d97f7c8-4zhjs\" (UID: \"79b5f958-f252-4703-b785-05b0d01a6e72\") " pod="openshift-image-registry/image-registry-697d97f7c8-4zhjs" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.241196 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/dcf200aa-ed03-4e96-a638-568a891b276f-etcd-service-ca\") pod \"etcd-operator-b45778765-vjs4q\" (UID: \"dcf200aa-ed03-4e96-a638-568a891b276f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-vjs4q" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.241231 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/4636576a-d3da-4491-a146-a6ffe6382a06-console-config\") pod \"console-f9d7485db-jv4xs\" (UID: \"4636576a-d3da-4491-a146-a6ffe6382a06\") " pod="openshift-console/console-f9d7485db-jv4xs" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.241293 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f4295e29-ac36-4236-b679-4cd87ea76347-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-jds44\" (UID: \"f4295e29-ac36-4236-b679-4cd87ea76347\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-jds44" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.241310 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9a717f28-31d3-4239-a315-97236362d5cb-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-l5vnp\" (UID: \"9a717f28-31d3-4239-a315-97236362d5cb\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-l5vnp" Oct 01 15:05:27 
crc kubenswrapper[4869]: I1001 15:05:27.241329 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/45ac9df8-4d47-44e7-a1bb-bd4d0b8fd560-socket-dir\") pod \"csi-hostpathplugin-9pwb5\" (UID: \"45ac9df8-4d47-44e7-a1bb-bd4d0b8fd560\") " pod="hostpath-provisioner/csi-hostpathplugin-9pwb5" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.241371 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vpnfr\" (UniqueName: \"kubernetes.io/projected/4c1dcd57-cee2-45ab-9a92-8cdd8b864f98-kube-api-access-vpnfr\") pod \"collect-profiles-29322180-8fxq6\" (UID: \"4c1dcd57-cee2-45ab-9a92-8cdd8b864f98\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322180-8fxq6" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.241408 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/3968ac06-9cc3-4dc8-827f-aff3261f131f-proxy-tls\") pod \"machine-config-controller-84d6567774-bb9hc\" (UID: \"3968ac06-9cc3-4dc8-827f-aff3261f131f\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-bb9hc" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.241451 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/906a4a45-c846-4d80-a3ec-10070aae888c-metrics-tls\") pod \"ingress-operator-5b745b69d9-gdgpp\" (UID: \"906a4a45-c846-4d80-a3ec-10070aae888c\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gdgpp" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.241483 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mv4fq\" (UniqueName: \"kubernetes.io/projected/f4295e29-ac36-4236-b679-4cd87ea76347-kube-api-access-mv4fq\") pod \"openshift-controller-manager-operator-756b6f6bc6-jds44\" (UID: \"f4295e29-ac36-4236-b679-4cd87ea76347\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-jds44" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.241502 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9m2kq\" (UniqueName: \"kubernetes.io/projected/dcf200aa-ed03-4e96-a638-568a891b276f-kube-api-access-9m2kq\") pod \"etcd-operator-b45778765-vjs4q\" (UID: \"dcf200aa-ed03-4e96-a638-568a891b276f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-vjs4q" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.241525 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a4dfc6a1-b623-46cb-b1ff-c6b809e8deaa-service-ca-bundle\") pod \"router-default-5444994796-mdc2f\" (UID: \"a4dfc6a1-b623-46cb-b1ff-c6b809e8deaa\") " pod="openshift-ingress/router-default-5444994796-mdc2f" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.241547 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2f6f6869-bd98-4c21-a184-a3b7bd5f0b5b-serving-cert\") pod \"openshift-config-operator-7777fb866f-gqrbk\" (UID: \"2f6f6869-bd98-4c21-a184-a3b7bd5f0b5b\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-gqrbk" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.241567 
4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2d195bae-5172-4387-869b-086f215963ff-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-t5kll\" (UID: \"2d195bae-5172-4387-869b-086f215963ff\") " pod="openshift-marketplace/marketplace-operator-79b997595-t5kll" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.241592 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ready\" (UniqueName: \"kubernetes.io/empty-dir/9ff567e5-6f6b-4b25-a8a0-3aa792f4291d-ready\") pod \"cni-sysctl-allowlist-ds-xffhm\" (UID: \"9ff567e5-6f6b-4b25-a8a0-3aa792f4291d\") " pod="openshift-multus/cni-sysctl-allowlist-ds-xffhm" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.241608 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dcf200aa-ed03-4e96-a638-568a891b276f-serving-cert\") pod \"etcd-operator-b45778765-vjs4q\" (UID: \"dcf200aa-ed03-4e96-a638-568a891b276f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-vjs4q" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.241637 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lqgjg\" (UniqueName: \"kubernetes.io/projected/bd217fac-bac4-44af-8942-0385d47f21d2-kube-api-access-lqgjg\") pod \"multus-admission-controller-857f4d67dd-tgtxf\" (UID: \"bd217fac-bac4-44af-8942-0385d47f21d2\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-tgtxf" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.241660 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-8mczm\" (UID: \"6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a\") " pod="openshift-authentication/oauth-openshift-558db77b4-8mczm" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.241718 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/328e4a8e-5321-43fa-8b1a-6ff72a374b6d-tmpfs\") pod \"packageserver-d55dfcdfc-wht8b\" (UID: \"328e4a8e-5321-43fa-8b1a-6ff72a374b6d\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wht8b" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.241743 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f2n6f\" (UniqueName: \"kubernetes.io/projected/906a4a45-c846-4d80-a3ec-10070aae888c-kube-api-access-f2n6f\") pod \"ingress-operator-5b745b69d9-gdgpp\" (UID: \"906a4a45-c846-4d80-a3ec-10070aae888c\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gdgpp" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.241798 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gj95g\" (UniqueName: \"kubernetes.io/projected/42cd6256-ae7b-46c8-a910-0d8723fa9351-kube-api-access-gj95g\") pod \"machine-config-operator-74547568cd-hnx9p\" (UID: \"42cd6256-ae7b-46c8-a910-0d8723fa9351\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-hnx9p" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.241830 4869 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"kube-api-access-74hd6\" (UniqueName: \"kubernetes.io/projected/4a5e3470-0f08-44e6-8d4c-f968950c75f2-kube-api-access-74hd6\") pod \"dns-operator-744455d44c-87546\" (UID: \"4a5e3470-0f08-44e6-8d4c-f968950c75f2\") " pod="openshift-dns-operator/dns-operator-744455d44c-87546" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.241871 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/45ac9df8-4d47-44e7-a1bb-bd4d0b8fd560-csi-data-dir\") pod \"csi-hostpathplugin-9pwb5\" (UID: \"45ac9df8-4d47-44e7-a1bb-bd4d0b8fd560\") " pod="hostpath-provisioner/csi-hostpathplugin-9pwb5" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.241891 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/2f6f6869-bd98-4c21-a184-a3b7bd5f0b5b-available-featuregates\") pod \"openshift-config-operator-7777fb866f-gqrbk\" (UID: \"2f6f6869-bd98-4c21-a184-a3b7bd5f0b5b\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-gqrbk" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.241910 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qnknb\" (UniqueName: \"kubernetes.io/projected/328e4a8e-5321-43fa-8b1a-6ff72a374b6d-kube-api-access-qnknb\") pod \"packageserver-d55dfcdfc-wht8b\" (UID: \"328e4a8e-5321-43fa-8b1a-6ff72a374b6d\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wht8b" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.241951 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/906a4a45-c846-4d80-a3ec-10070aae888c-trusted-ca\") pod \"ingress-operator-5b745b69d9-gdgpp\" (UID: \"906a4a45-c846-4d80-a3ec-10070aae888c\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gdgpp" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.241971 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-89r4r\" (UniqueName: \"kubernetes.io/projected/3968ac06-9cc3-4dc8-827f-aff3261f131f-kube-api-access-89r4r\") pod \"machine-config-controller-84d6567774-bb9hc\" (UID: \"3968ac06-9cc3-4dc8-827f-aff3261f131f\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-bb9hc" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.241987 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/62ecb2b0-55b3-4344-8237-705ef292ef63-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-xpzff\" (UID: \"62ecb2b0-55b3-4344-8237-705ef292ef63\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-xpzff" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.242037 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/4636576a-d3da-4491-a146-a6ffe6382a06-console-serving-cert\") pod \"console-f9d7485db-jv4xs\" (UID: \"4636576a-d3da-4491-a146-a6ffe6382a06\") " pod="openshift-console/console-f9d7485db-jv4xs" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.242055 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" 
(UniqueName: \"kubernetes.io/configmap/6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-8mczm\" (UID: \"6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a\") " pod="openshift-authentication/oauth-openshift-558db77b4-8mczm" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.242097 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/42cd6256-ae7b-46c8-a910-0d8723fa9351-auth-proxy-config\") pod \"machine-config-operator-74547568cd-hnx9p\" (UID: \"42cd6256-ae7b-46c8-a910-0d8723fa9351\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-hnx9p" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.242120 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/de7935aa-128a-418f-bf83-2b9aabc146ed-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-hjpxg\" (UID: \"de7935aa-128a-418f-bf83-2b9aabc146ed\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-hjpxg" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.242146 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/4636576a-d3da-4491-a146-a6ffe6382a06-console-oauth-config\") pod \"console-f9d7485db-jv4xs\" (UID: \"4636576a-d3da-4491-a146-a6ffe6382a06\") " pod="openshift-console/console-f9d7485db-jv4xs" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.242193 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/9ff567e5-6f6b-4b25-a8a0-3aa792f4291d-tuning-conf-dir\") pod \"cni-sysctl-allowlist-ds-xffhm\" (UID: \"9ff567e5-6f6b-4b25-a8a0-3aa792f4291d\") " pod="openshift-multus/cni-sysctl-allowlist-ds-xffhm" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.242217 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/dcf200aa-ed03-4e96-a638-568a891b276f-etcd-client\") pod \"etcd-operator-b45778765-vjs4q\" (UID: \"dcf200aa-ed03-4e96-a638-568a891b276f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-vjs4q" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.242297 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-8mczm\" (UID: \"6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a\") " pod="openshift-authentication/oauth-openshift-558db77b4-8mczm" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.242338 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/d8bb34c7-d9d3-466b-979a-f81019e3cc15-certs\") pod \"machine-config-server-w5ngh\" (UID: \"d8bb34c7-d9d3-466b-979a-f81019e3cc15\") " pod="openshift-machine-config-operator/machine-config-server-w5ngh" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.242371 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jnvcn\" (UniqueName: 
\"kubernetes.io/projected/4ac58a76-114b-4e86-924c-ad1f269b36a8-kube-api-access-jnvcn\") pod \"kube-storage-version-migrator-operator-b67b599dd-klj2b\" (UID: \"4ac58a76-114b-4e86-924c-ad1f269b36a8\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-klj2b" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.242430 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9a717f28-31d3-4239-a315-97236362d5cb-config\") pod \"kube-apiserver-operator-766d6c64bb-l5vnp\" (UID: \"9a717f28-31d3-4239-a315-97236362d5cb\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-l5vnp" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.242468 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ht69c\" (UniqueName: \"kubernetes.io/projected/45ac9df8-4d47-44e7-a1bb-bd4d0b8fd560-kube-api-access-ht69c\") pod \"csi-hostpathplugin-9pwb5\" (UID: \"45ac9df8-4d47-44e7-a1bb-bd4d0b8fd560\") " pod="hostpath-provisioner/csi-hostpathplugin-9pwb5" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.242508 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9qxfd\" (UniqueName: \"kubernetes.io/projected/60f75bf5-448e-4770-8eba-26c271028f50-kube-api-access-9qxfd\") pod \"dns-default-nk6rv\" (UID: \"60f75bf5-448e-4770-8eba-26c271028f50\") " pod="openshift-dns/dns-default-nk6rv" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.242526 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4636576a-d3da-4491-a146-a6ffe6382a06-trusted-ca-bundle\") pod \"console-f9d7485db-jv4xs\" (UID: \"4636576a-d3da-4491-a146-a6ffe6382a06\") " pod="openshift-console/console-f9d7485db-jv4xs" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.242545 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-8mczm\" (UID: \"6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a\") " pod="openshift-authentication/oauth-openshift-558db77b4-8mczm" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.242584 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/9ff567e5-6f6b-4b25-a8a0-3aa792f4291d-cni-sysctl-allowlist\") pod \"cni-sysctl-allowlist-ds-xffhm\" (UID: \"9ff567e5-6f6b-4b25-a8a0-3aa792f4291d\") " pod="openshift-multus/cni-sysctl-allowlist-ds-xffhm" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.242601 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/45ac9df8-4d47-44e7-a1bb-bd4d0b8fd560-mountpoint-dir\") pod \"csi-hostpathplugin-9pwb5\" (UID: \"45ac9df8-4d47-44e7-a1bb-bd4d0b8fd560\") " pod="hostpath-provisioner/csi-hostpathplugin-9pwb5" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.242673 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-8mczm\" 
(UID: \"6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a\") " pod="openshift-authentication/oauth-openshift-558db77b4-8mczm" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.242695 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/79b5f958-f252-4703-b785-05b0d01a6e72-trusted-ca\") pod \"image-registry-697d97f7c8-4zhjs\" (UID: \"79b5f958-f252-4703-b785-05b0d01a6e72\") " pod="openshift-image-registry/image-registry-697d97f7c8-4zhjs" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.242734 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-8mczm\" (UID: \"6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a\") " pod="openshift-authentication/oauth-openshift-558db77b4-8mczm" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.242758 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vzrkc\" (UniqueName: \"kubernetes.io/projected/fa4dadea-9e0e-431f-b6a7-057e5cec4f9f-kube-api-access-vzrkc\") pod \"service-ca-operator-777779d784-jlnv5\" (UID: \"fa4dadea-9e0e-431f-b6a7-057e5cec4f9f\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-jlnv5" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.245126 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-8mczm\" (UID: \"6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a\") " pod="openshift-authentication/oauth-openshift-558db77b4-8mczm" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.245156 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sk5qg\" (UniqueName: \"kubernetes.io/projected/2d195bae-5172-4387-869b-086f215963ff-kube-api-access-sk5qg\") pod \"marketplace-operator-79b997595-t5kll\" (UID: \"2d195bae-5172-4387-869b-086f215963ff\") " pod="openshift-marketplace/marketplace-operator-79b997595-t5kll" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.245183 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f4295e29-ac36-4236-b679-4cd87ea76347-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-jds44\" (UID: \"f4295e29-ac36-4236-b679-4cd87ea76347\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-jds44" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.245207 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a-audit-policies\") pod \"oauth-openshift-558db77b4-8mczm\" (UID: \"6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a\") " pod="openshift-authentication/oauth-openshift-558db77b4-8mczm" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.245237 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/328e4a8e-5321-43fa-8b1a-6ff72a374b6d-apiservice-cert\") pod \"packageserver-d55dfcdfc-wht8b\" (UID: \"328e4a8e-5321-43fa-8b1a-6ff72a374b6d\") " 
pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wht8b" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.245293 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/42cd6256-ae7b-46c8-a910-0d8723fa9351-proxy-tls\") pod \"machine-config-operator-74547568cd-hnx9p\" (UID: \"42cd6256-ae7b-46c8-a910-0d8723fa9351\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-hnx9p" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.245332 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/79b5f958-f252-4703-b785-05b0d01a6e72-ca-trust-extracted\") pod \"image-registry-697d97f7c8-4zhjs\" (UID: \"79b5f958-f252-4703-b785-05b0d01a6e72\") " pod="openshift-image-registry/image-registry-697d97f7c8-4zhjs" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.245361 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/60f75bf5-448e-4770-8eba-26c271028f50-config-volume\") pod \"dns-default-nk6rv\" (UID: \"60f75bf5-448e-4770-8eba-26c271028f50\") " pod="openshift-dns/dns-default-nk6rv" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.245388 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.245412 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-8mczm\" (UID: \"6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a\") " pod="openshift-authentication/oauth-openshift-558db77b4-8mczm" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.245431 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gltw6\" (UniqueName: \"kubernetes.io/projected/9ff567e5-6f6b-4b25-a8a0-3aa792f4291d-kube-api-access-gltw6\") pod \"cni-sysctl-allowlist-ds-xffhm\" (UID: \"9ff567e5-6f6b-4b25-a8a0-3aa792f4291d\") " pod="openshift-multus/cni-sysctl-allowlist-ds-xffhm" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.245450 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cvl5m\" (UniqueName: \"kubernetes.io/projected/4636576a-d3da-4491-a146-a6ffe6382a06-kube-api-access-cvl5m\") pod \"console-f9d7485db-jv4xs\" (UID: \"4636576a-d3da-4491-a146-a6ffe6382a06\") " pod="openshift-console/console-f9d7485db-jv4xs" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.245492 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/3968ac06-9cc3-4dc8-827f-aff3261f131f-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-bb9hc\" (UID: \"3968ac06-9cc3-4dc8-827f-aff3261f131f\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-bb9hc" Oct 01 15:05:27 crc 
kubenswrapper[4869]: I1001 15:05:27.245526 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/79b5f958-f252-4703-b785-05b0d01a6e72-registry-certificates\") pod \"image-registry-697d97f7c8-4zhjs\" (UID: \"79b5f958-f252-4703-b785-05b0d01a6e72\") " pod="openshift-image-registry/image-registry-697d97f7c8-4zhjs" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.245556 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a-audit-dir\") pod \"oauth-openshift-558db77b4-8mczm\" (UID: \"6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a\") " pod="openshift-authentication/oauth-openshift-558db77b4-8mczm" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.245572 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a4dfc6a1-b623-46cb-b1ff-c6b809e8deaa-metrics-certs\") pod \"router-default-5444994796-mdc2f\" (UID: \"a4dfc6a1-b623-46cb-b1ff-c6b809e8deaa\") " pod="openshift-ingress/router-default-5444994796-mdc2f" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.245591 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/328e4a8e-5321-43fa-8b1a-6ff72a374b6d-webhook-cert\") pod \"packageserver-d55dfcdfc-wht8b\" (UID: \"328e4a8e-5321-43fa-8b1a-6ff72a374b6d\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wht8b" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.245621 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/bd217fac-bac4-44af-8942-0385d47f21d2-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-tgtxf\" (UID: \"bd217fac-bac4-44af-8942-0385d47f21d2\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-tgtxf" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.245640 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-8mczm\" (UID: \"6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a\") " pod="openshift-authentication/oauth-openshift-558db77b4-8mczm" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.245657 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/906a4a45-c846-4d80-a3ec-10070aae888c-bound-sa-token\") pod \"ingress-operator-5b745b69d9-gdgpp\" (UID: \"906a4a45-c846-4d80-a3ec-10070aae888c\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gdgpp" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.245688 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/42cd6256-ae7b-46c8-a910-0d8723fa9351-images\") pod \"machine-config-operator-74547568cd-hnx9p\" (UID: \"42cd6256-ae7b-46c8-a910-0d8723fa9351\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-hnx9p" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.245726 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-f5ldd\" (UniqueName: \"kubernetes.io/projected/79b5f958-f252-4703-b785-05b0d01a6e72-kube-api-access-f5ldd\") pod \"image-registry-697d97f7c8-4zhjs\" (UID: \"79b5f958-f252-4703-b785-05b0d01a6e72\") " pod="openshift-image-registry/image-registry-697d97f7c8-4zhjs" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.245744 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mbwsq\" (UniqueName: \"kubernetes.io/projected/d8bb34c7-d9d3-466b-979a-f81019e3cc15-kube-api-access-mbwsq\") pod \"machine-config-server-w5ngh\" (UID: \"d8bb34c7-d9d3-466b-979a-f81019e3cc15\") " pod="openshift-machine-config-operator/machine-config-server-w5ngh" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.245762 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2cn9d\" (UniqueName: \"kubernetes.io/projected/0e4f6d72-0c4a-40db-adc3-5b113c7bc499-kube-api-access-2cn9d\") pod \"ingress-canary-v6xf7\" (UID: \"0e4f6d72-0c4a-40db-adc3-5b113c7bc499\") " pod="openshift-ingress-canary/ingress-canary-v6xf7" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.245778 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/a4dfc6a1-b623-46cb-b1ff-c6b809e8deaa-default-certificate\") pod \"router-default-5444994796-mdc2f\" (UID: \"a4dfc6a1-b623-46cb-b1ff-c6b809e8deaa\") " pod="openshift-ingress/router-default-5444994796-mdc2f" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.245811 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/dcf200aa-ed03-4e96-a638-568a891b276f-etcd-ca\") pod \"etcd-operator-b45778765-vjs4q\" (UID: \"dcf200aa-ed03-4e96-a638-568a891b276f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-vjs4q" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.245827 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/9338ac98-e76c-4c74-af46-b646ab637e37-signing-key\") pod \"service-ca-9c57cc56f-qd5ft\" (UID: \"9338ac98-e76c-4c74-af46-b646ab637e37\") " pod="openshift-service-ca/service-ca-9c57cc56f-qd5ft" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.245856 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/0e4f6d72-0c4a-40db-adc3-5b113c7bc499-cert\") pod \"ingress-canary-v6xf7\" (UID: \"0e4f6d72-0c4a-40db-adc3-5b113c7bc499\") " pod="openshift-ingress-canary/ingress-canary-v6xf7" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.245873 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/4a5e3470-0f08-44e6-8d4c-f968950c75f2-metrics-tls\") pod \"dns-operator-744455d44c-87546\" (UID: \"4a5e3470-0f08-44e6-8d4c-f968950c75f2\") " pod="openshift-dns-operator/dns-operator-744455d44c-87546" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.245892 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/62ecb2b0-55b3-4344-8237-705ef292ef63-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-xpzff\" (UID: \"62ecb2b0-55b3-4344-8237-705ef292ef63\") 
" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-xpzff" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.245925 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kpwj8\" (UniqueName: \"kubernetes.io/projected/2f6f6869-bd98-4c21-a184-a3b7bd5f0b5b-kube-api-access-kpwj8\") pod \"openshift-config-operator-7777fb866f-gqrbk\" (UID: \"2f6f6869-bd98-4c21-a184-a3b7bd5f0b5b\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-gqrbk" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.248287 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/2f6f6869-bd98-4c21-a184-a3b7bd5f0b5b-available-featuregates\") pod \"openshift-config-operator-7777fb866f-gqrbk\" (UID: \"2f6f6869-bd98-4c21-a184-a3b7bd5f0b5b\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-gqrbk" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.249355 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-8mczm\" (UID: \"6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a\") " pod="openshift-authentication/oauth-openshift-558db77b4-8mczm" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.249452 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-8mczm\" (UID: \"6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a\") " pod="openshift-authentication/oauth-openshift-558db77b4-8mczm" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.250096 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/79b5f958-f252-4703-b785-05b0d01a6e72-ca-trust-extracted\") pod \"image-registry-697d97f7c8-4zhjs\" (UID: \"79b5f958-f252-4703-b785-05b0d01a6e72\") " pod="openshift-image-registry/image-registry-697d97f7c8-4zhjs" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.257969 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-8mczm\" (UID: \"6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a\") " pod="openshift-authentication/oauth-openshift-558db77b4-8mczm" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.259337 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9a717f28-31d3-4239-a315-97236362d5cb-config\") pod \"kube-apiserver-operator-766d6c64bb-l5vnp\" (UID: \"9a717f28-31d3-4239-a315-97236362d5cb\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-l5vnp" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.261178 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4636576a-d3da-4491-a146-a6ffe6382a06-trusted-ca-bundle\") pod \"console-f9d7485db-jv4xs\" (UID: \"4636576a-d3da-4491-a146-a6ffe6382a06\") " pod="openshift-console/console-f9d7485db-jv4xs" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 
15:05:27.262532 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/79b5f958-f252-4703-b785-05b0d01a6e72-installation-pull-secrets\") pod \"image-registry-697d97f7c8-4zhjs\" (UID: \"79b5f958-f252-4703-b785-05b0d01a6e72\") " pod="openshift-image-registry/image-registry-697d97f7c8-4zhjs" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.264029 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.265162 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/4636576a-d3da-4491-a146-a6ffe6382a06-oauth-serving-cert\") pod \"console-f9d7485db-jv4xs\" (UID: \"4636576a-d3da-4491-a146-a6ffe6382a06\") " pod="openshift-console/console-f9d7485db-jv4xs" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.267732 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a-audit-policies\") pod \"oauth-openshift-558db77b4-8mczm\" (UID: \"6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a\") " pod="openshift-authentication/oauth-openshift-558db77b4-8mczm" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.270033 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/bd217fac-bac4-44af-8942-0385d47f21d2-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-tgtxf\" (UID: \"bd217fac-bac4-44af-8942-0385d47f21d2\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-tgtxf" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.274551 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-8mczm\" (UID: \"6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a\") " pod="openshift-authentication/oauth-openshift-558db77b4-8mczm" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.274923 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/4636576a-d3da-4491-a146-a6ffe6382a06-console-oauth-config\") pod \"console-f9d7485db-jv4xs\" (UID: \"4636576a-d3da-4491-a146-a6ffe6382a06\") " pod="openshift-console/console-f9d7485db-jv4xs" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.276098 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/79b5f958-f252-4703-b785-05b0d01a6e72-trusted-ca\") pod \"image-registry-697d97f7c8-4zhjs\" (UID: \"79b5f958-f252-4703-b785-05b0d01a6e72\") " pod="openshift-image-registry/image-registry-697d97f7c8-4zhjs" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.277205 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/4636576a-d3da-4491-a146-a6ffe6382a06-service-ca\") pod \"console-f9d7485db-jv4xs\" (UID: \"4636576a-d3da-4491-a146-a6ffe6382a06\") " 
pod="openshift-console/console-f9d7485db-jv4xs" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.277201 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-8mczm\" (UID: \"6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a\") " pod="openshift-authentication/oauth-openshift-558db77b4-8mczm" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.277511 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-8mczm\" (UID: \"6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a\") " pod="openshift-authentication/oauth-openshift-558db77b4-8mczm" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.278729 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/79b5f958-f252-4703-b785-05b0d01a6e72-registry-certificates\") pod \"image-registry-697d97f7c8-4zhjs\" (UID: \"79b5f958-f252-4703-b785-05b0d01a6e72\") " pod="openshift-image-registry/image-registry-697d97f7c8-4zhjs" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.278766 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a-audit-dir\") pod \"oauth-openshift-558db77b4-8mczm\" (UID: \"6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a\") " pod="openshift-authentication/oauth-openshift-558db77b4-8mczm" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.279921 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f4295e29-ac36-4236-b679-4cd87ea76347-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-jds44\" (UID: \"f4295e29-ac36-4236-b679-4cd87ea76347\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-jds44" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.280456 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-8mczm\" (UID: \"6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a\") " pod="openshift-authentication/oauth-openshift-558db77b4-8mczm" Oct 01 15:05:27 crc kubenswrapper[4869]: E1001 15:05:27.280951 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 15:05:27.780913339 +0000 UTC m=+36.927756455 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.281429 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2f6f6869-bd98-4c21-a184-a3b7bd5f0b5b-serving-cert\") pod \"openshift-config-operator-7777fb866f-gqrbk\" (UID: \"2f6f6869-bd98-4c21-a184-a3b7bd5f0b5b\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-gqrbk" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.286604 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-8mczm\" (UID: \"6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a\") " pod="openshift-authentication/oauth-openshift-558db77b4-8mczm" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.289657 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-8mczm\" (UID: \"6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a\") " pod="openshift-authentication/oauth-openshift-558db77b4-8mczm" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.289937 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.290626 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/4636576a-d3da-4491-a146-a6ffe6382a06-console-serving-cert\") pod \"console-f9d7485db-jv4xs\" (UID: \"4636576a-d3da-4491-a146-a6ffe6382a06\") " pod="openshift-console/console-f9d7485db-jv4xs" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.290764 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9a717f28-31d3-4239-a315-97236362d5cb-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-l5vnp\" (UID: \"9a717f28-31d3-4239-a315-97236362d5cb\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-l5vnp" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.290997 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-8mczm\" (UID: \"6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a\") " pod="openshift-authentication/oauth-openshift-558db77b4-8mczm" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.291150 4869 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/4636576a-d3da-4491-a146-a6ffe6382a06-console-config\") pod \"console-f9d7485db-jv4xs\" (UID: \"4636576a-d3da-4491-a146-a6ffe6382a06\") " pod="openshift-console/console-f9d7485db-jv4xs" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.326167 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f4295e29-ac36-4236-b679-4cd87ea76347-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-jds44\" (UID: \"f4295e29-ac36-4236-b679-4cd87ea76347\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-jds44" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.330119 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/4a5e3470-0f08-44e6-8d4c-f968950c75f2-metrics-tls\") pod \"dns-operator-744455d44c-87546\" (UID: \"4a5e3470-0f08-44e6-8d4c-f968950c75f2\") " pod="openshift-dns-operator/dns-operator-744455d44c-87546" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.330572 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/79b5f958-f252-4703-b785-05b0d01a6e72-registry-tls\") pod \"image-registry-697d97f7c8-4zhjs\" (UID: \"79b5f958-f252-4703-b785-05b0d01a6e72\") " pod="openshift-image-registry/image-registry-697d97f7c8-4zhjs" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.335055 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kpwj8\" (UniqueName: \"kubernetes.io/projected/2f6f6869-bd98-4c21-a184-a3b7bd5f0b5b-kube-api-access-kpwj8\") pod \"openshift-config-operator-7777fb866f-gqrbk\" (UID: \"2f6f6869-bd98-4c21-a184-a3b7bd5f0b5b\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-gqrbk" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.339474 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-8mczm\" (UID: \"6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a\") " pod="openshift-authentication/oauth-openshift-558db77b4-8mczm" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.347588 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2d195bae-5172-4387-869b-086f215963ff-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-t5kll\" (UID: \"2d195bae-5172-4387-869b-086f215963ff\") " pod="openshift-marketplace/marketplace-operator-79b997595-t5kll" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.347624 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ready\" (UniqueName: \"kubernetes.io/empty-dir/9ff567e5-6f6b-4b25-a8a0-3aa792f4291d-ready\") pod \"cni-sysctl-allowlist-ds-xffhm\" (UID: \"9ff567e5-6f6b-4b25-a8a0-3aa792f4291d\") " pod="openshift-multus/cni-sysctl-allowlist-ds-xffhm" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.347641 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dcf200aa-ed03-4e96-a638-568a891b276f-serving-cert\") pod \"etcd-operator-b45778765-vjs4q\" (UID: \"dcf200aa-ed03-4e96-a638-568a891b276f\") " 
pod="openshift-etcd-operator/etcd-operator-b45778765-vjs4q" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.347665 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/328e4a8e-5321-43fa-8b1a-6ff72a374b6d-tmpfs\") pod \"packageserver-d55dfcdfc-wht8b\" (UID: \"328e4a8e-5321-43fa-8b1a-6ff72a374b6d\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wht8b" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.347680 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f2n6f\" (UniqueName: \"kubernetes.io/projected/906a4a45-c846-4d80-a3ec-10070aae888c-kube-api-access-f2n6f\") pod \"ingress-operator-5b745b69d9-gdgpp\" (UID: \"906a4a45-c846-4d80-a3ec-10070aae888c\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gdgpp" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.347703 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gj95g\" (UniqueName: \"kubernetes.io/projected/42cd6256-ae7b-46c8-a910-0d8723fa9351-kube-api-access-gj95g\") pod \"machine-config-operator-74547568cd-hnx9p\" (UID: \"42cd6256-ae7b-46c8-a910-0d8723fa9351\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-hnx9p" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.347722 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/45ac9df8-4d47-44e7-a1bb-bd4d0b8fd560-csi-data-dir\") pod \"csi-hostpathplugin-9pwb5\" (UID: \"45ac9df8-4d47-44e7-a1bb-bd4d0b8fd560\") " pod="hostpath-provisioner/csi-hostpathplugin-9pwb5" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.347738 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qnknb\" (UniqueName: \"kubernetes.io/projected/328e4a8e-5321-43fa-8b1a-6ff72a374b6d-kube-api-access-qnknb\") pod \"packageserver-d55dfcdfc-wht8b\" (UID: \"328e4a8e-5321-43fa-8b1a-6ff72a374b6d\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wht8b" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.347753 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/906a4a45-c846-4d80-a3ec-10070aae888c-trusted-ca\") pod \"ingress-operator-5b745b69d9-gdgpp\" (UID: \"906a4a45-c846-4d80-a3ec-10070aae888c\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gdgpp" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.347767 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-89r4r\" (UniqueName: \"kubernetes.io/projected/3968ac06-9cc3-4dc8-827f-aff3261f131f-kube-api-access-89r4r\") pod \"machine-config-controller-84d6567774-bb9hc\" (UID: \"3968ac06-9cc3-4dc8-827f-aff3261f131f\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-bb9hc" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.347782 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/62ecb2b0-55b3-4344-8237-705ef292ef63-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-xpzff\" (UID: \"62ecb2b0-55b3-4344-8237-705ef292ef63\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-xpzff" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.347798 4869 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/42cd6256-ae7b-46c8-a910-0d8723fa9351-auth-proxy-config\") pod \"machine-config-operator-74547568cd-hnx9p\" (UID: \"42cd6256-ae7b-46c8-a910-0d8723fa9351\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-hnx9p" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.347813 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/de7935aa-128a-418f-bf83-2b9aabc146ed-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-hjpxg\" (UID: \"de7935aa-128a-418f-bf83-2b9aabc146ed\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-hjpxg" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.347830 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/9ff567e5-6f6b-4b25-a8a0-3aa792f4291d-tuning-conf-dir\") pod \"cni-sysctl-allowlist-ds-xffhm\" (UID: \"9ff567e5-6f6b-4b25-a8a0-3aa792f4291d\") " pod="openshift-multus/cni-sysctl-allowlist-ds-xffhm" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.347849 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/dcf200aa-ed03-4e96-a638-568a891b276f-etcd-client\") pod \"etcd-operator-b45778765-vjs4q\" (UID: \"dcf200aa-ed03-4e96-a638-568a891b276f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-vjs4q" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.347864 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/d8bb34c7-d9d3-466b-979a-f81019e3cc15-certs\") pod \"machine-config-server-w5ngh\" (UID: \"d8bb34c7-d9d3-466b-979a-f81019e3cc15\") " pod="openshift-machine-config-operator/machine-config-server-w5ngh" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.347880 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jnvcn\" (UniqueName: \"kubernetes.io/projected/4ac58a76-114b-4e86-924c-ad1f269b36a8-kube-api-access-jnvcn\") pod \"kube-storage-version-migrator-operator-b67b599dd-klj2b\" (UID: \"4ac58a76-114b-4e86-924c-ad1f269b36a8\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-klj2b" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.347896 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ht69c\" (UniqueName: \"kubernetes.io/projected/45ac9df8-4d47-44e7-a1bb-bd4d0b8fd560-kube-api-access-ht69c\") pod \"csi-hostpathplugin-9pwb5\" (UID: \"45ac9df8-4d47-44e7-a1bb-bd4d0b8fd560\") " pod="hostpath-provisioner/csi-hostpathplugin-9pwb5" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.347911 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9qxfd\" (UniqueName: \"kubernetes.io/projected/60f75bf5-448e-4770-8eba-26c271028f50-kube-api-access-9qxfd\") pod \"dns-default-nk6rv\" (UID: \"60f75bf5-448e-4770-8eba-26c271028f50\") " pod="openshift-dns/dns-default-nk6rv" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.347925 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: 
\"kubernetes.io/configmap/9ff567e5-6f6b-4b25-a8a0-3aa792f4291d-cni-sysctl-allowlist\") pod \"cni-sysctl-allowlist-ds-xffhm\" (UID: \"9ff567e5-6f6b-4b25-a8a0-3aa792f4291d\") " pod="openshift-multus/cni-sysctl-allowlist-ds-xffhm" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.347939 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/45ac9df8-4d47-44e7-a1bb-bd4d0b8fd560-mountpoint-dir\") pod \"csi-hostpathplugin-9pwb5\" (UID: \"45ac9df8-4d47-44e7-a1bb-bd4d0b8fd560\") " pod="hostpath-provisioner/csi-hostpathplugin-9pwb5" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.347957 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vzrkc\" (UniqueName: \"kubernetes.io/projected/fa4dadea-9e0e-431f-b6a7-057e5cec4f9f-kube-api-access-vzrkc\") pod \"service-ca-operator-777779d784-jlnv5\" (UID: \"fa4dadea-9e0e-431f-b6a7-057e5cec4f9f\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-jlnv5" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.347975 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sk5qg\" (UniqueName: \"kubernetes.io/projected/2d195bae-5172-4387-869b-086f215963ff-kube-api-access-sk5qg\") pod \"marketplace-operator-79b997595-t5kll\" (UID: \"2d195bae-5172-4387-869b-086f215963ff\") " pod="openshift-marketplace/marketplace-operator-79b997595-t5kll" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.347991 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/328e4a8e-5321-43fa-8b1a-6ff72a374b6d-apiservice-cert\") pod \"packageserver-d55dfcdfc-wht8b\" (UID: \"328e4a8e-5321-43fa-8b1a-6ff72a374b6d\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wht8b" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.348009 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/42cd6256-ae7b-46c8-a910-0d8723fa9351-proxy-tls\") pod \"machine-config-operator-74547568cd-hnx9p\" (UID: \"42cd6256-ae7b-46c8-a910-0d8723fa9351\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-hnx9p" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.348037 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/60f75bf5-448e-4770-8eba-26c271028f50-config-volume\") pod \"dns-default-nk6rv\" (UID: \"60f75bf5-448e-4770-8eba-26c271028f50\") " pod="openshift-dns/dns-default-nk6rv" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.348054 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gltw6\" (UniqueName: \"kubernetes.io/projected/9ff567e5-6f6b-4b25-a8a0-3aa792f4291d-kube-api-access-gltw6\") pod \"cni-sysctl-allowlist-ds-xffhm\" (UID: \"9ff567e5-6f6b-4b25-a8a0-3aa792f4291d\") " pod="openshift-multus/cni-sysctl-allowlist-ds-xffhm" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.348076 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/3968ac06-9cc3-4dc8-827f-aff3261f131f-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-bb9hc\" (UID: \"3968ac06-9cc3-4dc8-827f-aff3261f131f\") " 
pod="openshift-machine-config-operator/machine-config-controller-84d6567774-bb9hc" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.348126 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.348145 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a4dfc6a1-b623-46cb-b1ff-c6b809e8deaa-metrics-certs\") pod \"router-default-5444994796-mdc2f\" (UID: \"a4dfc6a1-b623-46cb-b1ff-c6b809e8deaa\") " pod="openshift-ingress/router-default-5444994796-mdc2f" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.348163 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/328e4a8e-5321-43fa-8b1a-6ff72a374b6d-webhook-cert\") pod \"packageserver-d55dfcdfc-wht8b\" (UID: \"328e4a8e-5321-43fa-8b1a-6ff72a374b6d\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wht8b" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.348180 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/906a4a45-c846-4d80-a3ec-10070aae888c-bound-sa-token\") pod \"ingress-operator-5b745b69d9-gdgpp\" (UID: \"906a4a45-c846-4d80-a3ec-10070aae888c\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gdgpp" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.348201 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/42cd6256-ae7b-46c8-a910-0d8723fa9351-images\") pod \"machine-config-operator-74547568cd-hnx9p\" (UID: \"42cd6256-ae7b-46c8-a910-0d8723fa9351\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-hnx9p" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.348226 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mbwsq\" (UniqueName: \"kubernetes.io/projected/d8bb34c7-d9d3-466b-979a-f81019e3cc15-kube-api-access-mbwsq\") pod \"machine-config-server-w5ngh\" (UID: \"d8bb34c7-d9d3-466b-979a-f81019e3cc15\") " pod="openshift-machine-config-operator/machine-config-server-w5ngh" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.348246 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2cn9d\" (UniqueName: \"kubernetes.io/projected/0e4f6d72-0c4a-40db-adc3-5b113c7bc499-kube-api-access-2cn9d\") pod \"ingress-canary-v6xf7\" (UID: \"0e4f6d72-0c4a-40db-adc3-5b113c7bc499\") " pod="openshift-ingress-canary/ingress-canary-v6xf7" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.348279 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/a4dfc6a1-b623-46cb-b1ff-c6b809e8deaa-default-certificate\") pod \"router-default-5444994796-mdc2f\" (UID: \"a4dfc6a1-b623-46cb-b1ff-c6b809e8deaa\") " pod="openshift-ingress/router-default-5444994796-mdc2f" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.348298 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: 
\"kubernetes.io/configmap/dcf200aa-ed03-4e96-a638-568a891b276f-etcd-ca\") pod \"etcd-operator-b45778765-vjs4q\" (UID: \"dcf200aa-ed03-4e96-a638-568a891b276f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-vjs4q" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.348317 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/9338ac98-e76c-4c74-af46-b646ab637e37-signing-key\") pod \"service-ca-9c57cc56f-qd5ft\" (UID: \"9338ac98-e76c-4c74-af46-b646ab637e37\") " pod="openshift-service-ca/service-ca-9c57cc56f-qd5ft" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.348339 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/0e4f6d72-0c4a-40db-adc3-5b113c7bc499-cert\") pod \"ingress-canary-v6xf7\" (UID: \"0e4f6d72-0c4a-40db-adc3-5b113c7bc499\") " pod="openshift-ingress-canary/ingress-canary-v6xf7" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.348354 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/62ecb2b0-55b3-4344-8237-705ef292ef63-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-xpzff\" (UID: \"62ecb2b0-55b3-4344-8237-705ef292ef63\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-xpzff" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.348374 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4zhjs\" (UID: \"79b5f958-f252-4703-b785-05b0d01a6e72\") " pod="openshift-image-registry/image-registry-697d97f7c8-4zhjs" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.348391 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/9338ac98-e76c-4c74-af46-b646ab637e37-signing-cabundle\") pod \"service-ca-9c57cc56f-qd5ft\" (UID: \"9338ac98-e76c-4c74-af46-b646ab637e37\") " pod="openshift-service-ca/service-ca-9c57cc56f-qd5ft" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.348410 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/60f75bf5-448e-4770-8eba-26c271028f50-metrics-tls\") pod \"dns-default-nk6rv\" (UID: \"60f75bf5-448e-4770-8eba-26c271028f50\") " pod="openshift-dns/dns-default-nk6rv" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.348434 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nntzm\" (UniqueName: \"kubernetes.io/projected/5067e051-5c99-4fe9-b4f2-e5dfa8b1c9b4-kube-api-access-nntzm\") pod \"migrator-59844c95c7-zb5c7\" (UID: \"5067e051-5c99-4fe9-b4f2-e5dfa8b1c9b4\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-zb5c7" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.348459 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w7hq4\" (UniqueName: \"kubernetes.io/projected/de7935aa-128a-418f-bf83-2b9aabc146ed-kube-api-access-w7hq4\") pod \"package-server-manager-789f6589d5-hjpxg\" (UID: \"de7935aa-128a-418f-bf83-2b9aabc146ed\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-hjpxg" Oct 01 15:05:27 crc 
kubenswrapper[4869]: I1001 15:05:27.348478 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4ac58a76-114b-4e86-924c-ad1f269b36a8-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-klj2b\" (UID: \"4ac58a76-114b-4e86-924c-ad1f269b36a8\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-klj2b" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.348494 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dcf200aa-ed03-4e96-a638-568a891b276f-config\") pod \"etcd-operator-b45778765-vjs4q\" (UID: \"dcf200aa-ed03-4e96-a638-568a891b276f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-vjs4q" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.348511 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/a4dfc6a1-b623-46cb-b1ff-c6b809e8deaa-stats-auth\") pod \"router-default-5444994796-mdc2f\" (UID: \"a4dfc6a1-b623-46cb-b1ff-c6b809e8deaa\") " pod="openshift-ingress/router-default-5444994796-mdc2f" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.348531 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.348548 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/45ac9df8-4d47-44e7-a1bb-bd4d0b8fd560-registration-dir\") pod \"csi-hostpathplugin-9pwb5\" (UID: \"45ac9df8-4d47-44e7-a1bb-bd4d0b8fd560\") " pod="hostpath-provisioner/csi-hostpathplugin-9pwb5" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.348563 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/d8bb34c7-d9d3-466b-979a-f81019e3cc15-node-bootstrap-token\") pod \"machine-config-server-w5ngh\" (UID: \"d8bb34c7-d9d3-466b-979a-f81019e3cc15\") " pod="openshift-machine-config-operator/machine-config-server-w5ngh" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.348579 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4jrmc\" (UniqueName: \"kubernetes.io/projected/a4dfc6a1-b623-46cb-b1ff-c6b809e8deaa-kube-api-access-4jrmc\") pod \"router-default-5444994796-mdc2f\" (UID: \"a4dfc6a1-b623-46cb-b1ff-c6b809e8deaa\") " pod="openshift-ingress/router-default-5444994796-mdc2f" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.348606 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4c1dcd57-cee2-45ab-9a92-8cdd8b864f98-config-volume\") pod \"collect-profiles-29322180-8fxq6\" (UID: \"4c1dcd57-cee2-45ab-9a92-8cdd8b864f98\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322180-8fxq6" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.348622 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: 
\"kubernetes.io/secret/2d195bae-5172-4387-869b-086f215963ff-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-t5kll\" (UID: \"2d195bae-5172-4387-869b-086f215963ff\") " pod="openshift-marketplace/marketplace-operator-79b997595-t5kll" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.348639 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4c1dcd57-cee2-45ab-9a92-8cdd8b864f98-secret-volume\") pod \"collect-profiles-29322180-8fxq6\" (UID: \"4c1dcd57-cee2-45ab-9a92-8cdd8b864f98\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322180-8fxq6" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.348655 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/62ecb2b0-55b3-4344-8237-705ef292ef63-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-xpzff\" (UID: \"62ecb2b0-55b3-4344-8237-705ef292ef63\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-xpzff" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.348671 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fa4dadea-9e0e-431f-b6a7-057e5cec4f9f-config\") pod \"service-ca-operator-777779d784-jlnv5\" (UID: \"fa4dadea-9e0e-431f-b6a7-057e5cec4f9f\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-jlnv5" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.348686 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/45ac9df8-4d47-44e7-a1bb-bd4d0b8fd560-plugins-dir\") pod \"csi-hostpathplugin-9pwb5\" (UID: \"45ac9df8-4d47-44e7-a1bb-bd4d0b8fd560\") " pod="hostpath-provisioner/csi-hostpathplugin-9pwb5" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.348702 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fa4dadea-9e0e-431f-b6a7-057e5cec4f9f-serving-cert\") pod \"service-ca-operator-777779d784-jlnv5\" (UID: \"fa4dadea-9e0e-431f-b6a7-057e5cec4f9f\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-jlnv5" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.348717 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zbhz5\" (UniqueName: \"kubernetes.io/projected/9338ac98-e76c-4c74-af46-b646ab637e37-kube-api-access-zbhz5\") pod \"service-ca-9c57cc56f-qd5ft\" (UID: \"9338ac98-e76c-4c74-af46-b646ab637e37\") " pod="openshift-service-ca/service-ca-9c57cc56f-qd5ft" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.348735 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4ac58a76-114b-4e86-924c-ad1f269b36a8-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-klj2b\" (UID: \"4ac58a76-114b-4e86-924c-ad1f269b36a8\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-klj2b" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.348760 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/dcf200aa-ed03-4e96-a638-568a891b276f-etcd-service-ca\") pod \"etcd-operator-b45778765-vjs4q\" (UID: \"dcf200aa-ed03-4e96-a638-568a891b276f\") " 
pod="openshift-etcd-operator/etcd-operator-b45778765-vjs4q" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.348777 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/45ac9df8-4d47-44e7-a1bb-bd4d0b8fd560-socket-dir\") pod \"csi-hostpathplugin-9pwb5\" (UID: \"45ac9df8-4d47-44e7-a1bb-bd4d0b8fd560\") " pod="hostpath-provisioner/csi-hostpathplugin-9pwb5" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.348792 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vpnfr\" (UniqueName: \"kubernetes.io/projected/4c1dcd57-cee2-45ab-9a92-8cdd8b864f98-kube-api-access-vpnfr\") pod \"collect-profiles-29322180-8fxq6\" (UID: \"4c1dcd57-cee2-45ab-9a92-8cdd8b864f98\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322180-8fxq6" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.348807 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/3968ac06-9cc3-4dc8-827f-aff3261f131f-proxy-tls\") pod \"machine-config-controller-84d6567774-bb9hc\" (UID: \"3968ac06-9cc3-4dc8-827f-aff3261f131f\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-bb9hc" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.348822 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/906a4a45-c846-4d80-a3ec-10070aae888c-metrics-tls\") pod \"ingress-operator-5b745b69d9-gdgpp\" (UID: \"906a4a45-c846-4d80-a3ec-10070aae888c\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gdgpp" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.348843 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9m2kq\" (UniqueName: \"kubernetes.io/projected/dcf200aa-ed03-4e96-a638-568a891b276f-kube-api-access-9m2kq\") pod \"etcd-operator-b45778765-vjs4q\" (UID: \"dcf200aa-ed03-4e96-a638-568a891b276f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-vjs4q" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.348860 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a4dfc6a1-b623-46cb-b1ff-c6b809e8deaa-service-ca-bundle\") pod \"router-default-5444994796-mdc2f\" (UID: \"a4dfc6a1-b623-46cb-b1ff-c6b809e8deaa\") " pod="openshift-ingress/router-default-5444994796-mdc2f" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.349549 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a4dfc6a1-b623-46cb-b1ff-c6b809e8deaa-service-ca-bundle\") pod \"router-default-5444994796-mdc2f\" (UID: \"a4dfc6a1-b623-46cb-b1ff-c6b809e8deaa\") " pod="openshift-ingress/router-default-5444994796-mdc2f" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.350396 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2d195bae-5172-4387-869b-086f215963ff-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-t5kll\" (UID: \"2d195bae-5172-4387-869b-086f215963ff\") " pod="openshift-marketplace/marketplace-operator-79b997595-t5kll" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.351400 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ready\" (UniqueName: 
\"kubernetes.io/empty-dir/9ff567e5-6f6b-4b25-a8a0-3aa792f4291d-ready\") pod \"cni-sysctl-allowlist-ds-xffhm\" (UID: \"9ff567e5-6f6b-4b25-a8a0-3aa792f4291d\") " pod="openshift-multus/cni-sysctl-allowlist-ds-xffhm" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.354706 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jn46p\" (UniqueName: \"kubernetes.io/projected/6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a-kube-api-access-jn46p\") pod \"oauth-openshift-558db77b4-8mczm\" (UID: \"6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a\") " pod="openshift-authentication/oauth-openshift-558db77b4-8mczm" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.356696 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9a717f28-31d3-4239-a315-97236362d5cb-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-l5vnp\" (UID: \"9a717f28-31d3-4239-a315-97236362d5cb\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-l5vnp" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.357150 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dcf200aa-ed03-4e96-a638-568a891b276f-serving-cert\") pod \"etcd-operator-b45778765-vjs4q\" (UID: \"dcf200aa-ed03-4e96-a638-568a891b276f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-vjs4q" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.357491 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4c1dcd57-cee2-45ab-9a92-8cdd8b864f98-config-volume\") pod \"collect-profiles-29322180-8fxq6\" (UID: \"4c1dcd57-cee2-45ab-9a92-8cdd8b864f98\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322180-8fxq6" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.357820 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/328e4a8e-5321-43fa-8b1a-6ff72a374b6d-tmpfs\") pod \"packageserver-d55dfcdfc-wht8b\" (UID: \"328e4a8e-5321-43fa-8b1a-6ff72a374b6d\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wht8b" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.358077 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/45ac9df8-4d47-44e7-a1bb-bd4d0b8fd560-csi-data-dir\") pod \"csi-hostpathplugin-9pwb5\" (UID: \"45ac9df8-4d47-44e7-a1bb-bd4d0b8fd560\") " pod="hostpath-provisioner/csi-hostpathplugin-9pwb5" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.358945 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/dcf200aa-ed03-4e96-a638-568a891b276f-etcd-ca\") pod \"etcd-operator-b45778765-vjs4q\" (UID: \"dcf200aa-ed03-4e96-a638-568a891b276f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-vjs4q" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.359184 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/906a4a45-c846-4d80-a3ec-10070aae888c-trusted-ca\") pod \"ingress-operator-5b745b69d9-gdgpp\" (UID: \"906a4a45-c846-4d80-a3ec-10070aae888c\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gdgpp" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.359442 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"default-certificate\" (UniqueName: \"kubernetes.io/secret/a4dfc6a1-b623-46cb-b1ff-c6b809e8deaa-default-certificate\") pod \"router-default-5444994796-mdc2f\" (UID: \"a4dfc6a1-b623-46cb-b1ff-c6b809e8deaa\") " pod="openshift-ingress/router-default-5444994796-mdc2f" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.365459 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/2d195bae-5172-4387-869b-086f215963ff-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-t5kll\" (UID: \"2d195bae-5172-4387-869b-086f215963ff\") " pod="openshift-marketplace/marketplace-operator-79b997595-t5kll" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.365921 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/42cd6256-ae7b-46c8-a910-0d8723fa9351-auth-proxy-config\") pod \"machine-config-operator-74547568cd-hnx9p\" (UID: \"42cd6256-ae7b-46c8-a910-0d8723fa9351\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-hnx9p" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.366478 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/dcf200aa-ed03-4e96-a638-568a891b276f-etcd-service-ca\") pod \"etcd-operator-b45778765-vjs4q\" (UID: \"dcf200aa-ed03-4e96-a638-568a891b276f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-vjs4q" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.366882 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/9338ac98-e76c-4c74-af46-b646ab637e37-signing-key\") pod \"service-ca-9c57cc56f-qd5ft\" (UID: \"9338ac98-e76c-4c74-af46-b646ab637e37\") " pod="openshift-service-ca/service-ca-9c57cc56f-qd5ft" Oct 01 15:05:27 crc kubenswrapper[4869]: E1001 15:05:27.366981 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 15:05:27.866963349 +0000 UTC m=+37.013806545 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4zhjs" (UID: "79b5f958-f252-4703-b785-05b0d01a6e72") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.366988 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fa4dadea-9e0e-431f-b6a7-057e5cec4f9f-config\") pod \"service-ca-operator-777779d784-jlnv5\" (UID: \"fa4dadea-9e0e-431f-b6a7-057e5cec4f9f\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-jlnv5" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.367122 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/9ff567e5-6f6b-4b25-a8a0-3aa792f4291d-tuning-conf-dir\") pod \"cni-sysctl-allowlist-ds-xffhm\" (UID: \"9ff567e5-6f6b-4b25-a8a0-3aa792f4291d\") " pod="openshift-multus/cni-sysctl-allowlist-ds-xffhm" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.367418 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4ac58a76-114b-4e86-924c-ad1f269b36a8-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-klj2b\" (UID: \"4ac58a76-114b-4e86-924c-ad1f269b36a8\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-klj2b" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.367551 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/62ecb2b0-55b3-4344-8237-705ef292ef63-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-xpzff\" (UID: \"62ecb2b0-55b3-4344-8237-705ef292ef63\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-xpzff" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.367764 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/45ac9df8-4d47-44e7-a1bb-bd4d0b8fd560-plugins-dir\") pod \"csi-hostpathplugin-9pwb5\" (UID: \"45ac9df8-4d47-44e7-a1bb-bd4d0b8fd560\") " pod="hostpath-provisioner/csi-hostpathplugin-9pwb5" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.367827 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dcf200aa-ed03-4e96-a638-568a891b276f-config\") pod \"etcd-operator-b45778765-vjs4q\" (UID: \"dcf200aa-ed03-4e96-a638-568a891b276f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-vjs4q" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.368020 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/9338ac98-e76c-4c74-af46-b646ab637e37-signing-cabundle\") pod \"service-ca-9c57cc56f-qd5ft\" (UID: \"9338ac98-e76c-4c74-af46-b646ab637e37\") " pod="openshift-service-ca/service-ca-9c57cc56f-qd5ft" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.369364 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/45ac9df8-4d47-44e7-a1bb-bd4d0b8fd560-socket-dir\") pod 
\"csi-hostpathplugin-9pwb5\" (UID: \"45ac9df8-4d47-44e7-a1bb-bd4d0b8fd560\") " pod="hostpath-provisioner/csi-hostpathplugin-9pwb5" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.373516 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/45ac9df8-4d47-44e7-a1bb-bd4d0b8fd560-registration-dir\") pod \"csi-hostpathplugin-9pwb5\" (UID: \"45ac9df8-4d47-44e7-a1bb-bd4d0b8fd560\") " pod="hostpath-provisioner/csi-hostpathplugin-9pwb5" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.375939 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.376318 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.377209 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/42cd6256-ae7b-46c8-a910-0d8723fa9351-images\") pod \"machine-config-operator-74547568cd-hnx9p\" (UID: \"42cd6256-ae7b-46c8-a910-0d8723fa9351\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-hnx9p" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.377684 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/60f75bf5-448e-4770-8eba-26c271028f50-config-volume\") pod \"dns-default-nk6rv\" (UID: \"60f75bf5-448e-4770-8eba-26c271028f50\") " pod="openshift-dns/dns-default-nk6rv" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.384219 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/3968ac06-9cc3-4dc8-827f-aff3261f131f-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-bb9hc\" (UID: \"3968ac06-9cc3-4dc8-827f-aff3261f131f\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-bb9hc" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.384842 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/0e4f6d72-0c4a-40db-adc3-5b113c7bc499-cert\") pod \"ingress-canary-v6xf7\" (UID: \"0e4f6d72-0c4a-40db-adc3-5b113c7bc499\") " pod="openshift-ingress-canary/ingress-canary-v6xf7" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.388497 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/d8bb34c7-d9d3-466b-979a-f81019e3cc15-node-bootstrap-token\") pod \"machine-config-server-w5ngh\" (UID: \"d8bb34c7-d9d3-466b-979a-f81019e3cc15\") " pod="openshift-machine-config-operator/machine-config-server-w5ngh" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.389536 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: 
\"kubernetes.io/host-path/45ac9df8-4d47-44e7-a1bb-bd4d0b8fd560-mountpoint-dir\") pod \"csi-hostpathplugin-9pwb5\" (UID: \"45ac9df8-4d47-44e7-a1bb-bd4d0b8fd560\") " pod="hostpath-provisioner/csi-hostpathplugin-9pwb5" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.392613 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-8mczm" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.394545 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/3968ac06-9cc3-4dc8-827f-aff3261f131f-proxy-tls\") pod \"machine-config-controller-84d6567774-bb9hc\" (UID: \"3968ac06-9cc3-4dc8-827f-aff3261f131f\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-bb9hc" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.399029 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/9ff567e5-6f6b-4b25-a8a0-3aa792f4291d-cni-sysctl-allowlist\") pod \"cni-sysctl-allowlist-ds-xffhm\" (UID: \"9ff567e5-6f6b-4b25-a8a0-3aa792f4291d\") " pod="openshift-multus/cni-sysctl-allowlist-ds-xffhm" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.404498 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/62ecb2b0-55b3-4344-8237-705ef292ef63-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-xpzff\" (UID: \"62ecb2b0-55b3-4344-8237-705ef292ef63\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-xpzff" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.406055 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/328e4a8e-5321-43fa-8b1a-6ff72a374b6d-webhook-cert\") pod \"packageserver-d55dfcdfc-wht8b\" (UID: \"328e4a8e-5321-43fa-8b1a-6ff72a374b6d\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wht8b" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.416600 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mv4fq\" (UniqueName: \"kubernetes.io/projected/f4295e29-ac36-4236-b679-4cd87ea76347-kube-api-access-mv4fq\") pod \"openshift-controller-manager-operator-756b6f6bc6-jds44\" (UID: \"f4295e29-ac36-4236-b679-4cd87ea76347\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-jds44" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.416986 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/42cd6256-ae7b-46c8-a910-0d8723fa9351-proxy-tls\") pod \"machine-config-operator-74547568cd-hnx9p\" (UID: \"42cd6256-ae7b-46c8-a910-0d8723fa9351\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-hnx9p" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.417188 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fa4dadea-9e0e-431f-b6a7-057e5cec4f9f-serving-cert\") pod \"service-ca-operator-777779d784-jlnv5\" (UID: \"fa4dadea-9e0e-431f-b6a7-057e5cec4f9f\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-jlnv5" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.417608 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: 
\"kubernetes.io/secret/4c1dcd57-cee2-45ab-9a92-8cdd8b864f98-secret-volume\") pod \"collect-profiles-29322180-8fxq6\" (UID: \"4c1dcd57-cee2-45ab-9a92-8cdd8b864f98\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322180-8fxq6" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.417749 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/dcf200aa-ed03-4e96-a638-568a891b276f-etcd-client\") pod \"etcd-operator-b45778765-vjs4q\" (UID: \"dcf200aa-ed03-4e96-a638-568a891b276f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-vjs4q" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.418021 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/328e4a8e-5321-43fa-8b1a-6ff72a374b6d-apiservice-cert\") pod \"packageserver-d55dfcdfc-wht8b\" (UID: \"328e4a8e-5321-43fa-8b1a-6ff72a374b6d\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wht8b" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.418889 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/a4dfc6a1-b623-46cb-b1ff-c6b809e8deaa-stats-auth\") pod \"router-default-5444994796-mdc2f\" (UID: \"a4dfc6a1-b623-46cb-b1ff-c6b809e8deaa\") " pod="openshift-ingress/router-default-5444994796-mdc2f" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.420833 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a4dfc6a1-b623-46cb-b1ff-c6b809e8deaa-metrics-certs\") pod \"router-default-5444994796-mdc2f\" (UID: \"a4dfc6a1-b623-46cb-b1ff-c6b809e8deaa\") " pod="openshift-ingress/router-default-5444994796-mdc2f" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.424751 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4ac58a76-114b-4e86-924c-ad1f269b36a8-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-klj2b\" (UID: \"4ac58a76-114b-4e86-924c-ad1f269b36a8\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-klj2b" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.425271 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/60f75bf5-448e-4770-8eba-26c271028f50-metrics-tls\") pod \"dns-default-nk6rv\" (UID: \"60f75bf5-448e-4770-8eba-26c271028f50\") " pod="openshift-dns/dns-default-nk6rv" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.433232 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/906a4a45-c846-4d80-a3ec-10070aae888c-metrics-tls\") pod \"ingress-operator-5b745b69d9-gdgpp\" (UID: \"906a4a45-c846-4d80-a3ec-10070aae888c\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gdgpp" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.433839 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.440247 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/de7935aa-128a-418f-bf83-2b9aabc146ed-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-hjpxg\" (UID: \"de7935aa-128a-418f-bf83-2b9aabc146ed\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-hjpxg" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.443760 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f5ldd\" (UniqueName: \"kubernetes.io/projected/79b5f958-f252-4703-b785-05b0d01a6e72-kube-api-access-f5ldd\") pod \"image-registry-697d97f7c8-4zhjs\" (UID: \"79b5f958-f252-4703-b785-05b0d01a6e72\") " pod="openshift-image-registry/image-registry-697d97f7c8-4zhjs" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.449866 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 15:05:27 crc kubenswrapper[4869]: E1001 15:05:27.450318 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 15:05:27.950301227 +0000 UTC m=+37.097144343 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.465869 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.472423 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lqgjg\" (UniqueName: \"kubernetes.io/projected/bd217fac-bac4-44af-8942-0385d47f21d2-kube-api-access-lqgjg\") pod \"multus-admission-controller-857f4d67dd-tgtxf\" (UID: \"bd217fac-bac4-44af-8942-0385d47f21d2\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-tgtxf" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.477203 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.481186 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cvl5m\" (UniqueName: \"kubernetes.io/projected/4636576a-d3da-4491-a146-a6ffe6382a06-kube-api-access-cvl5m\") pod \"console-f9d7485db-jv4xs\" (UID: \"4636576a-d3da-4491-a146-a6ffe6382a06\") " pod="openshift-console/console-f9d7485db-jv4xs" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.488798 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-dxcl2"] Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.491679 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/79b5f958-f252-4703-b785-05b0d01a6e72-bound-sa-token\") pod \"image-registry-697d97f7c8-4zhjs\" (UID: \"79b5f958-f252-4703-b785-05b0d01a6e72\") " pod="openshift-image-registry/image-registry-697d97f7c8-4zhjs" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.494903 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/d8bb34c7-d9d3-466b-979a-f81019e3cc15-certs\") pod \"machine-config-server-w5ngh\" (UID: \"d8bb34c7-d9d3-466b-979a-f81019e3cc15\") " pod="openshift-machine-config-operator/machine-config-server-w5ngh" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.499949 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-5zhxq"] Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.501866 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-74hd6\" (UniqueName: \"kubernetes.io/projected/4a5e3470-0f08-44e6-8d4c-f968950c75f2-kube-api-access-74hd6\") pod \"dns-operator-744455d44c-87546\" (UID: \"4a5e3470-0f08-44e6-8d4c-f968950c75f2\") " pod="openshift-dns-operator/dns-operator-744455d44c-87546" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.509156 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2cn9d\" (UniqueName: \"kubernetes.io/projected/0e4f6d72-0c4a-40db-adc3-5b113c7bc499-kube-api-access-2cn9d\") pod \"ingress-canary-v6xf7\" (UID: \"0e4f6d72-0c4a-40db-adc3-5b113c7bc499\") " pod="openshift-ingress-canary/ingress-canary-v6xf7" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.509859 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-vg26t"] Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.521009 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mbwsq\" (UniqueName: \"kubernetes.io/projected/d8bb34c7-d9d3-466b-979a-f81019e3cc15-kube-api-access-mbwsq\") pod \"machine-config-server-w5ngh\" (UID: \"d8bb34c7-d9d3-466b-979a-f81019e3cc15\") " pod="openshift-machine-config-operator/machine-config-server-w5ngh" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.532221 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-w5ngh" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.551383 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4jrmc\" (UniqueName: \"kubernetes.io/projected/a4dfc6a1-b623-46cb-b1ff-c6b809e8deaa-kube-api-access-4jrmc\") pod \"router-default-5444994796-mdc2f\" (UID: \"a4dfc6a1-b623-46cb-b1ff-c6b809e8deaa\") " pod="openshift-ingress/router-default-5444994796-mdc2f" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.552109 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4zhjs\" (UID: \"79b5f958-f252-4703-b785-05b0d01a6e72\") " pod="openshift-image-registry/image-registry-697d97f7c8-4zhjs" Oct 01 15:05:27 crc kubenswrapper[4869]: E1001 15:05:27.552727 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 15:05:28.052696595 +0000 UTC m=+37.199539711 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4zhjs" (UID: "79b5f958-f252-4703-b785-05b0d01a6e72") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.566642 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zbhz5\" (UniqueName: \"kubernetes.io/projected/9338ac98-e76c-4c74-af46-b646ab637e37-kube-api-access-zbhz5\") pod \"service-ca-9c57cc56f-qd5ft\" (UID: \"9338ac98-e76c-4c74-af46-b646ab637e37\") " pod="openshift-service-ca/service-ca-9c57cc56f-qd5ft" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.581158 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-canary/ingress-canary-v6xf7" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.582035 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f2n6f\" (UniqueName: \"kubernetes.io/projected/906a4a45-c846-4d80-a3ec-10070aae888c-kube-api-access-f2n6f\") pod \"ingress-operator-5b745b69d9-gdgpp\" (UID: \"906a4a45-c846-4d80-a3ec-10070aae888c\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gdgpp" Oct 01 15:05:27 crc kubenswrapper[4869]: W1001 15:05:27.591210 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2d8f8fae_d727_4763_bb02_0a74320ba8c4.slice/crio-0ff1295bcd56833ba3c677ae374039f37d5c81519a6c88c042000556ae430739 WatchSource:0}: Error finding container 0ff1295bcd56833ba3c677ae374039f37d5c81519a6c88c042000556ae430739: Status 404 returned error can't find the container with id 0ff1295bcd56833ba3c677ae374039f37d5c81519a6c88c042000556ae430739 Oct 01 15:05:27 crc kubenswrapper[4869]: W1001 15:05:27.594394 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3635d59c_a52d_465d_8402_80153cd7369b.slice/crio-bb40840dc360e6b13928e139ca1cf6a8e3205b063ad217173aa662a7f34a1e4f WatchSource:0}: Error finding container bb40840dc360e6b13928e139ca1cf6a8e3205b063ad217173aa662a7f34a1e4f: Status 404 returned error can't find the container with id bb40840dc360e6b13928e139ca1cf6a8e3205b063ad217173aa662a7f34a1e4f Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.605542 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-gqrbk" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.613066 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gj95g\" (UniqueName: \"kubernetes.io/projected/42cd6256-ae7b-46c8-a910-0d8723fa9351-kube-api-access-gj95g\") pod \"machine-config-operator-74547568cd-hnx9p\" (UID: \"42cd6256-ae7b-46c8-a910-0d8723fa9351\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-hnx9p" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.613471 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-l5vnp" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.626538 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-jv4xs" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.626768 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qnknb\" (UniqueName: \"kubernetes.io/projected/328e4a8e-5321-43fa-8b1a-6ff72a374b6d-kube-api-access-qnknb\") pod \"packageserver-d55dfcdfc-wht8b\" (UID: \"328e4a8e-5321-43fa-8b1a-6ff72a374b6d\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wht8b" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.631796 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-87546" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.648778 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-jds44" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.653519 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 15:05:27 crc kubenswrapper[4869]: E1001 15:05:27.653645 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 15:05:28.153625753 +0000 UTC m=+37.300468869 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.653847 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4zhjs\" (UID: \"79b5f958-f252-4703-b785-05b0d01a6e72\") " pod="openshift-image-registry/image-registry-697d97f7c8-4zhjs" Oct 01 15:05:27 crc kubenswrapper[4869]: E1001 15:05:27.654107 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 15:05:28.154099306 +0000 UTC m=+37.300942412 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4zhjs" (UID: "79b5f958-f252-4703-b785-05b0d01a6e72") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.654511 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-89r4r\" (UniqueName: \"kubernetes.io/projected/3968ac06-9cc3-4dc8-827f-aff3261f131f-kube-api-access-89r4r\") pod \"machine-config-controller-84d6567774-bb9hc\" (UID: \"3968ac06-9cc3-4dc8-827f-aff3261f131f\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-bb9hc" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.664141 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vpnfr\" (UniqueName: \"kubernetes.io/projected/4c1dcd57-cee2-45ab-9a92-8cdd8b864f98-kube-api-access-vpnfr\") pod \"collect-profiles-29322180-8fxq6\" (UID: \"4c1dcd57-cee2-45ab-9a92-8cdd8b864f98\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322180-8fxq6" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.688739 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w7hq4\" (UniqueName: \"kubernetes.io/projected/de7935aa-128a-418f-bf83-2b9aabc146ed-kube-api-access-w7hq4\") pod \"package-server-manager-789f6589d5-hjpxg\" (UID: \"de7935aa-128a-418f-bf83-2b9aabc146ed\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-hjpxg" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.692273 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-tgtxf" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.698746 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29322180-8fxq6" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.699828 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/62ecb2b0-55b3-4344-8237-705ef292ef63-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-xpzff\" (UID: \"62ecb2b0-55b3-4344-8237-705ef292ef63\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-xpzff" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.718000 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-hjpxg" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.720756 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sk5qg\" (UniqueName: \"kubernetes.io/projected/2d195bae-5172-4387-869b-086f215963ff-kube-api-access-sk5qg\") pod \"marketplace-operator-79b997595-t5kll\" (UID: \"2d195bae-5172-4387-869b-086f215963ff\") " pod="openshift-marketplace/marketplace-operator-79b997595-t5kll" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.728804 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-xpzff" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.748121 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-bb9hc" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.754560 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 15:05:27 crc kubenswrapper[4869]: E1001 15:05:27.754869 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 15:05:28.254852869 +0000 UTC m=+37.401695985 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.774643 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-mdc2f" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.778609 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9m2kq\" (UniqueName: \"kubernetes.io/projected/dcf200aa-ed03-4e96-a638-568a891b276f-kube-api-access-9m2kq\") pod \"etcd-operator-b45778765-vjs4q\" (UID: \"dcf200aa-ed03-4e96-a638-568a891b276f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-vjs4q" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.779819 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-vkxf8"] Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.791170 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nntzm\" (UniqueName: \"kubernetes.io/projected/5067e051-5c99-4fe9-b4f2-e5dfa8b1c9b4-kube-api-access-nntzm\") pod \"migrator-59844c95c7-zb5c7\" (UID: \"5067e051-5c99-4fe9-b4f2-e5dfa8b1c9b4\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-zb5c7" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.791811 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-hnx9p" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.806540 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/906a4a45-c846-4d80-a3ec-10070aae888c-bound-sa-token\") pod \"ingress-operator-5b745b69d9-gdgpp\" (UID: \"906a4a45-c846-4d80-a3ec-10070aae888c\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gdgpp" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.806845 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wht8b" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.823788 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-qd5ft" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.826318 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9qxfd\" (UniqueName: \"kubernetes.io/projected/60f75bf5-448e-4770-8eba-26c271028f50-kube-api-access-9qxfd\") pod \"dns-default-nk6rv\" (UID: \"60f75bf5-448e-4770-8eba-26c271028f50\") " pod="openshift-dns/dns-default-nk6rv" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.842994 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gltw6\" (UniqueName: \"kubernetes.io/projected/9ff567e5-6f6b-4b25-a8a0-3aa792f4291d-kube-api-access-gltw6\") pod \"cni-sysctl-allowlist-ds-xffhm\" (UID: \"9ff567e5-6f6b-4b25-a8a0-3aa792f4291d\") " pod="openshift-multus/cni-sysctl-allowlist-ds-xffhm" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.856772 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4zhjs\" (UID: \"79b5f958-f252-4703-b785-05b0d01a6e72\") " pod="openshift-image-registry/image-registry-697d97f7c8-4zhjs" Oct 01 15:05:27 crc kubenswrapper[4869]: E1001 15:05:27.857117 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 15:05:28.357104383 +0000 UTC m=+37.503947499 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4zhjs" (UID: "79b5f958-f252-4703-b785-05b0d01a6e72") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.860563 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vzrkc\" (UniqueName: \"kubernetes.io/projected/fa4dadea-9e0e-431f-b6a7-057e5cec4f9f-kube-api-access-vzrkc\") pod \"service-ca-operator-777779d784-jlnv5\" (UID: \"fa4dadea-9e0e-431f-b6a7-057e5cec4f9f\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-jlnv5" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.870457 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-qp4hv"] Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.882783 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-nk6rv" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.886380 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/cni-sysctl-allowlist-ds-xffhm" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.930308 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jnvcn\" (UniqueName: \"kubernetes.io/projected/4ac58a76-114b-4e86-924c-ad1f269b36a8-kube-api-access-jnvcn\") pod \"kube-storage-version-migrator-operator-b67b599dd-klj2b\" (UID: \"4ac58a76-114b-4e86-924c-ad1f269b36a8\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-klj2b" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.931042 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-j7g7t"] Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.934697 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ht69c\" (UniqueName: \"kubernetes.io/projected/45ac9df8-4d47-44e7-a1bb-bd4d0b8fd560-kube-api-access-ht69c\") pod \"csi-hostpathplugin-9pwb5\" (UID: \"45ac9df8-4d47-44e7-a1bb-bd4d0b8fd560\") " pod="hostpath-provisioner/csi-hostpathplugin-9pwb5" Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.962685 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 15:05:27 crc kubenswrapper[4869]: E1001 15:05:27.969153 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 15:05:28.469105607 +0000 UTC m=+37.615948723 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:27 crc kubenswrapper[4869]: I1001 15:05:27.972627 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-w5ngh" event={"ID":"d8bb34c7-d9d3-466b-979a-f81019e3cc15","Type":"ContainerStarted","Data":"2741ca29e9ebda85c864348781d99b5f42d8ea27b4718665903de19a6cad908e"} Oct 01 15:05:27 crc kubenswrapper[4869]: W1001 15:05:27.994272 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0a95b64f_5e82_446c_8369_7304b2c6ec5d.slice/crio-fc2287299c2172da93312d7b6f57b0132b3660a87a0f315cf97bd8dcdcf22cb5 WatchSource:0}: Error finding container fc2287299c2172da93312d7b6f57b0132b3660a87a0f315cf97bd8dcdcf22cb5: Status 404 returned error can't find the container with id fc2287299c2172da93312d7b6f57b0132b3660a87a0f315cf97bd8dcdcf22cb5 Oct 01 15:05:28 crc kubenswrapper[4869]: I1001 15:05:28.008578 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-t5kll" Oct 01 15:05:28 crc kubenswrapper[4869]: I1001 15:05:28.010885 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-sm2sz"] Oct 01 15:05:28 crc kubenswrapper[4869]: I1001 15:05:28.011364 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-sj2cx"] Oct 01 15:05:28 crc kubenswrapper[4869]: I1001 15:05:28.016210 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-vg26t" event={"ID":"3157872f-8582-4335-af4d-18e4ab38d91c","Type":"ContainerStarted","Data":"52389967bc6543d3e954aecfb7dab1ddbeecdeb40cca3975e8b5af3e33261d71"} Oct 01 15:05:28 crc kubenswrapper[4869]: I1001 15:05:28.019608 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-zb5c7" Oct 01 15:05:28 crc kubenswrapper[4869]: I1001 15:05:28.031820 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-dxcl2" event={"ID":"2d8f8fae-d727-4763-bb02-0a74320ba8c4","Type":"ContainerStarted","Data":"0ff1295bcd56833ba3c677ae374039f37d5c81519a6c88c042000556ae430739"} Oct 01 15:05:28 crc kubenswrapper[4869]: I1001 15:05:28.042648 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-vjs4q" Oct 01 15:05:28 crc kubenswrapper[4869]: I1001 15:05:28.066570 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-klj2b" Oct 01 15:05:28 crc kubenswrapper[4869]: I1001 15:05:28.067068 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4zhjs\" (UID: \"79b5f958-f252-4703-b785-05b0d01a6e72\") " pod="openshift-image-registry/image-registry-697d97f7c8-4zhjs" Oct 01 15:05:28 crc kubenswrapper[4869]: E1001 15:05:28.067460 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 15:05:28.567446106 +0000 UTC m=+37.714289222 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4zhjs" (UID: "79b5f958-f252-4703-b785-05b0d01a6e72") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:28 crc kubenswrapper[4869]: I1001 15:05:28.074555 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5zhxq" event={"ID":"3635d59c-a52d-465d-8402-80153cd7369b","Type":"ContainerStarted","Data":"bb40840dc360e6b13928e139ca1cf6a8e3205b063ad217173aa662a7f34a1e4f"} Oct 01 15:05:28 crc kubenswrapper[4869]: I1001 15:05:28.082277 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gdgpp" Oct 01 15:05:28 crc kubenswrapper[4869]: I1001 15:05:28.110323 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pdvwx" event={"ID":"e3343b45-e092-46ef-9e1b-6d4d55167c19","Type":"ContainerStarted","Data":"693f1bad7baab144fcc74aba61f6e7daba932e53c1a4217b058ca003a7aef190"} Oct 01 15:05:28 crc kubenswrapper[4869]: I1001 15:05:28.113986 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-jlnv5" Oct 01 15:05:28 crc kubenswrapper[4869]: I1001 15:05:28.137154 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-hxvsn" event={"ID":"2424fe66-6e4a-4dc6-b00e-c5a1b01e3f4a","Type":"ContainerStarted","Data":"f378b828733aadb5ef6597d2833a1c8de787bec9cdc858431129f39257486d36"} Oct 01 15:05:28 crc kubenswrapper[4869]: I1001 15:05:28.153769 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-s8l4m" Oct 01 15:05:28 crc kubenswrapper[4869]: I1001 15:05:28.156762 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-9pwb5" Oct 01 15:05:28 crc kubenswrapper[4869]: I1001 15:05:28.170904 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 15:05:28 crc kubenswrapper[4869]: E1001 15:05:28.171564 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 15:05:28.671507648 +0000 UTC m=+37.818350764 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:28 crc kubenswrapper[4869]: I1001 15:05:28.171647 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4zhjs\" (UID: \"79b5f958-f252-4703-b785-05b0d01a6e72\") " pod="openshift-image-registry/image-registry-697d97f7c8-4zhjs" Oct 01 15:05:28 crc kubenswrapper[4869]: E1001 15:05:28.172416 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 15:05:28.672408812 +0000 UTC m=+37.819251918 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4zhjs" (UID: "79b5f958-f252-4703-b785-05b0d01a6e72") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:28 crc kubenswrapper[4869]: I1001 15:05:28.272914 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 15:05:28 crc kubenswrapper[4869]: E1001 15:05:28.273121 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 15:05:28.773085464 +0000 UTC m=+37.919928580 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:28 crc kubenswrapper[4869]: I1001 15:05:28.273599 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4zhjs\" (UID: \"79b5f958-f252-4703-b785-05b0d01a6e72\") " pod="openshift-image-registry/image-registry-697d97f7c8-4zhjs" Oct 01 15:05:28 crc kubenswrapper[4869]: E1001 15:05:28.279145 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 15:05:28.779129625 +0000 UTC m=+37.925972741 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4zhjs" (UID: "79b5f958-f252-4703-b785-05b0d01a6e72") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:28 crc kubenswrapper[4869]: I1001 15:05:28.320279 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-v6xf7"] Oct 01 15:05:28 crc kubenswrapper[4869]: I1001 15:05:28.359755 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-8mczm"] Oct 01 15:05:28 crc kubenswrapper[4869]: I1001 15:05:28.375333 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 15:05:28 crc kubenswrapper[4869]: E1001 15:05:28.375611 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 15:05:28.875580734 +0000 UTC m=+38.022423850 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:28 crc kubenswrapper[4869]: I1001 15:05:28.429042 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-kv7s4" podStartSLOduration=17.429020982 podStartE2EDuration="17.429020982s" podCreationTimestamp="2025-10-01 15:05:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:05:28.419412935 +0000 UTC m=+37.566256061" watchObservedRunningTime="2025-10-01 15:05:28.429020982 +0000 UTC m=+37.575864098" Oct 01 15:05:28 crc kubenswrapper[4869]: I1001 15:05:28.477832 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4zhjs\" (UID: \"79b5f958-f252-4703-b785-05b0d01a6e72\") " pod="openshift-image-registry/image-registry-697d97f7c8-4zhjs" Oct 01 15:05:28 crc kubenswrapper[4869]: E1001 15:05:28.478243 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 15:05:28.978227408 +0000 UTC m=+38.125070524 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4zhjs" (UID: "79b5f958-f252-4703-b785-05b0d01a6e72") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:28 crc kubenswrapper[4869]: I1001 15:05:28.552650 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pdvwx" podStartSLOduration=16.552631507 podStartE2EDuration="16.552631507s" podCreationTimestamp="2025-10-01 15:05:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:05:28.522220174 +0000 UTC m=+37.669063290" watchObservedRunningTime="2025-10-01 15:05:28.552631507 +0000 UTC m=+37.699474623" Oct 01 15:05:28 crc kubenswrapper[4869]: I1001 15:05:28.579749 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 15:05:28 crc kubenswrapper[4869]: E1001 15:05:28.580022 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 15:05:29.080004269 +0000 UTC m=+38.226847385 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:28 crc kubenswrapper[4869]: I1001 15:05:28.663333 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-xjqf4" podStartSLOduration=17.663314976 podStartE2EDuration="17.663314976s" podCreationTimestamp="2025-10-01 15:05:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:05:28.643924678 +0000 UTC m=+37.790767794" watchObservedRunningTime="2025-10-01 15:05:28.663314976 +0000 UTC m=+37.810158092" Oct 01 15:05:28 crc kubenswrapper[4869]: I1001 15:05:28.681059 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4zhjs\" (UID: \"79b5f958-f252-4703-b785-05b0d01a6e72\") " pod="openshift-image-registry/image-registry-697d97f7c8-4zhjs" Oct 01 15:05:28 crc kubenswrapper[4869]: E1001 15:05:28.681441 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 15:05:29.18142993 +0000 UTC m=+38.328273046 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4zhjs" (UID: "79b5f958-f252-4703-b785-05b0d01a6e72") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:28 crc kubenswrapper[4869]: I1001 15:05:28.783706 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 15:05:28 crc kubenswrapper[4869]: E1001 15:05:28.787418 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 15:05:29.287403732 +0000 UTC m=+38.434246848 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:28 crc kubenswrapper[4869]: I1001 15:05:28.813670 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-gqrbk"] Oct 01 15:05:28 crc kubenswrapper[4869]: I1001 15:05:28.888641 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4zhjs\" (UID: \"79b5f958-f252-4703-b785-05b0d01a6e72\") " pod="openshift-image-registry/image-registry-697d97f7c8-4zhjs" Oct 01 15:05:28 crc kubenswrapper[4869]: E1001 15:05:28.888992 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 15:05:29.388978028 +0000 UTC m=+38.535821134 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4zhjs" (UID: "79b5f958-f252-4703-b785-05b0d01a6e72") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:28 crc kubenswrapper[4869]: I1001 15:05:28.989151 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 15:05:28 crc kubenswrapper[4869]: E1001 15:05:28.989595 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 15:05:29.489582457 +0000 UTC m=+38.636425573 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:28 crc kubenswrapper[4869]: I1001 15:05:28.989779 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4zhjs\" (UID: \"79b5f958-f252-4703-b785-05b0d01a6e72\") " pod="openshift-image-registry/image-registry-697d97f7c8-4zhjs" Oct 01 15:05:28 crc kubenswrapper[4869]: E1001 15:05:28.990081 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 15:05:29.49007414 +0000 UTC m=+38.636917256 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4zhjs" (UID: "79b5f958-f252-4703-b785-05b0d01a6e72") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:29 crc kubenswrapper[4869]: I1001 15:05:29.091847 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 15:05:29 crc kubenswrapper[4869]: E1001 15:05:29.092139 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 15:05:29.592120899 +0000 UTC m=+38.738964015 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:29 crc kubenswrapper[4869]: I1001 15:05:29.100713 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4zhjs\" (UID: \"79b5f958-f252-4703-b785-05b0d01a6e72\") " pod="openshift-image-registry/image-registry-697d97f7c8-4zhjs" Oct 01 15:05:29 crc kubenswrapper[4869]: E1001 15:05:29.101079 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 15:05:29.601067178 +0000 UTC m=+38.747910294 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4zhjs" (UID: "79b5f958-f252-4703-b785-05b0d01a6e72") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:29 crc kubenswrapper[4869]: I1001 15:05:29.158029 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-j7g7t" event={"ID":"8ffaf4d1-4b33-48a0-ae56-4b116924f58a","Type":"ContainerStarted","Data":"c1931b5afd3e9bec3958e6855745ad399415aea896254c435c0c7005ebbc1185"} Oct 01 15:05:29 crc kubenswrapper[4869]: I1001 15:05:29.158093 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-j7g7t" event={"ID":"8ffaf4d1-4b33-48a0-ae56-4b116924f58a","Type":"ContainerStarted","Data":"dfda2d3a66a3362e41c3673754f745f65c86f7a6e25329adb918100d32382a55"} Oct 01 15:05:29 crc kubenswrapper[4869]: I1001 15:05:29.192152 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-vkxf8" event={"ID":"2ec0b99f-cb20-410b-8142-a3d046ed6578","Type":"ContainerStarted","Data":"bc93115bfbe9037bde611f43f54e52d80a06d049f08390e6ead5bbd5f5a284fe"} Oct 01 15:05:29 crc kubenswrapper[4869]: I1001 15:05:29.192207 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-vkxf8" event={"ID":"2ec0b99f-cb20-410b-8142-a3d046ed6578","Type":"ContainerStarted","Data":"b178acf8b888cd545e3bc943c2a406934e4f4958df3f84f92226c9f409610045"} Oct 01 15:05:29 crc kubenswrapper[4869]: I1001 15:05:29.210692 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 15:05:29 crc kubenswrapper[4869]: 
E1001 15:05:29.211357 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 15:05:29.711335106 +0000 UTC m=+38.858178212 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:29 crc kubenswrapper[4869]: I1001 15:05:29.220312 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-jzt8h" podStartSLOduration=18.220290175 podStartE2EDuration="18.220290175s" podCreationTimestamp="2025-10-01 15:05:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:05:29.134169853 +0000 UTC m=+38.281012969" watchObservedRunningTime="2025-10-01 15:05:29.220290175 +0000 UTC m=+38.367133291" Oct 01 15:05:29 crc kubenswrapper[4869]: I1001 15:05:29.231539 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-sm2sz" event={"ID":"58c072ca-9e0a-418d-90ba-ea33213d42c7","Type":"ContainerStarted","Data":"fea55893bda1c6e2eede259f3c4e9f8206c2028fc987f1bdbb382f94314c34c9"} Oct 01 15:05:29 crc kubenswrapper[4869]: I1001 15:05:29.290530 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-dxcl2" event={"ID":"2d8f8fae-d727-4763-bb02-0a74320ba8c4","Type":"ContainerStarted","Data":"56803e6614f74178c6570417a9029e046bdf6cc63c9725998f635f5ba02168fb"} Oct 01 15:05:29 crc kubenswrapper[4869]: I1001 15:05:29.301242 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-dxcl2" Oct 01 15:05:29 crc kubenswrapper[4869]: I1001 15:05:29.312465 4869 patch_prober.go:28] interesting pod/downloads-7954f5f757-dxcl2 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" start-of-body= Oct 01 15:05:29 crc kubenswrapper[4869]: I1001 15:05:29.312532 4869 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-dxcl2" podUID="2d8f8fae-d727-4763-bb02-0a74320ba8c4" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" Oct 01 15:05:29 crc kubenswrapper[4869]: I1001 15:05:29.313501 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-gqrbk" event={"ID":"2f6f6869-bd98-4c21-a184-a3b7bd5f0b5b","Type":"ContainerStarted","Data":"7c285eb881380182b2ba622157ea5ceb25b2ebf4a44e37aef3eedba98f81e6b2"} Oct 01 15:05:29 crc kubenswrapper[4869]: I1001 15:05:29.314121 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4zhjs\" (UID: \"79b5f958-f252-4703-b785-05b0d01a6e72\") " pod="openshift-image-registry/image-registry-697d97f7c8-4zhjs" Oct 01 15:05:29 crc kubenswrapper[4869]: E1001 15:05:29.315652 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 15:05:29.815637574 +0000 UTC m=+38.962480690 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4zhjs" (UID: "79b5f958-f252-4703-b785-05b0d01a6e72") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:29 crc kubenswrapper[4869]: I1001 15:05:29.334811 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-w5ngh" event={"ID":"d8bb34c7-d9d3-466b-979a-f81019e3cc15","Type":"ContainerStarted","Data":"a5935f9a71324d2e6887cb6fef2dfb0f0209b527350ef459270c9bd8e10653c7"} Oct 01 15:05:29 crc kubenswrapper[4869]: I1001 15:05:29.344396 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"23d7ba1619936e2ac6c1ad3df994c90672aa97491fa5dbff19c5651b2f171cb0"} Oct 01 15:05:29 crc kubenswrapper[4869]: I1001 15:05:29.346864 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/cni-sysctl-allowlist-ds-xffhm" event={"ID":"9ff567e5-6f6b-4b25-a8a0-3aa792f4291d","Type":"ContainerStarted","Data":"2cc02681b4c213008c8b3fc78fd27a872c10a9029e60ebbb3680e6c52757aae4"} Oct 01 15:05:29 crc kubenswrapper[4869]: I1001 15:05:29.349913 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-qp4hv" event={"ID":"0a95b64f-5e82-446c-8369-7304b2c6ec5d","Type":"ContainerStarted","Data":"0a6930a543d896c894cf634822e000ca62593a41eb77033df8db1b0ef79feb1a"} Oct 01 15:05:29 crc kubenswrapper[4869]: I1001 15:05:29.349937 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-qp4hv" event={"ID":"0a95b64f-5e82-446c-8369-7304b2c6ec5d","Type":"ContainerStarted","Data":"fc2287299c2172da93312d7b6f57b0132b3660a87a0f315cf97bd8dcdcf22cb5"} Oct 01 15:05:29 crc kubenswrapper[4869]: I1001 15:05:29.349951 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-qp4hv" Oct 01 15:05:29 crc kubenswrapper[4869]: I1001 15:05:29.369848 4869 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-qp4hv container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.18:8443/healthz\": dial tcp 10.217.0.18:8443: connect: connection refused" start-of-body= Oct 01 15:05:29 crc kubenswrapper[4869]: I1001 15:05:29.369986 4869 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-qp4hv" 
podUID="0a95b64f-5e82-446c-8369-7304b2c6ec5d" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.18:8443/healthz\": dial tcp 10.217.0.18:8443: connect: connection refused" Oct 01 15:05:29 crc kubenswrapper[4869]: I1001 15:05:29.370056 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"9704997f0de88939e85f3788b32e3680b611ba2308c74cdf989a160031aa919f"} Oct 01 15:05:29 crc kubenswrapper[4869]: I1001 15:05:29.383329 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-fx7tg" podStartSLOduration=18.383304263 podStartE2EDuration="18.383304263s" podCreationTimestamp="2025-10-01 15:05:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:05:29.370452409 +0000 UTC m=+38.517295555" watchObservedRunningTime="2025-10-01 15:05:29.383304263 +0000 UTC m=+38.530147379" Oct 01 15:05:29 crc kubenswrapper[4869]: I1001 15:05:29.406570 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-vg26t" event={"ID":"3157872f-8582-4335-af4d-18e4ab38d91c","Type":"ContainerStarted","Data":"f4f43a848f850bb47b2857ad474fcf3d59f5c1adb478924cb1cc52d7136b36f6"} Oct 01 15:05:29 crc kubenswrapper[4869]: I1001 15:05:29.407821 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-vg26t" Oct 01 15:05:29 crc kubenswrapper[4869]: I1001 15:05:29.409962 4869 patch_prober.go:28] interesting pod/console-operator-58897d9998-vg26t container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.15:8443/readyz\": dial tcp 10.217.0.15:8443: connect: connection refused" start-of-body= Oct 01 15:05:29 crc kubenswrapper[4869]: I1001 15:05:29.410012 4869 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-vg26t" podUID="3157872f-8582-4335-af4d-18e4ab38d91c" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.15:8443/readyz\": dial tcp 10.217.0.15:8443: connect: connection refused" Oct 01 15:05:29 crc kubenswrapper[4869]: I1001 15:05:29.423508 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 15:05:29 crc kubenswrapper[4869]: E1001 15:05:29.423692 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 15:05:29.923675362 +0000 UTC m=+39.070518478 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:29 crc kubenswrapper[4869]: I1001 15:05:29.426160 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-sj2cx" event={"ID":"e544db96-88d4-4638-b95a-de0e7c17e2d9","Type":"ContainerStarted","Data":"a4368bdb1e175728bf00ac9ca04737bff0ab202d47b031d5e7bac473676c3137"} Oct 01 15:05:29 crc kubenswrapper[4869]: I1001 15:05:29.447412 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-8mczm" event={"ID":"6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a","Type":"ContainerStarted","Data":"c2ed8b6f10028dabfe097ee76619aa500fc8ff4342fb9d2ba5a40f30e2a92b21"} Oct 01 15:05:29 crc kubenswrapper[4869]: I1001 15:05:29.455170 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-mdc2f" event={"ID":"a4dfc6a1-b623-46cb-b1ff-c6b809e8deaa","Type":"ContainerStarted","Data":"e5196e4d0fef3a940bf4bc061145fc6e761cc8c8b653724e914beea5b7b594d9"} Oct 01 15:05:29 crc kubenswrapper[4869]: I1001 15:05:29.463139 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-v6xf7" event={"ID":"0e4f6d72-0c4a-40db-adc3-5b113c7bc499","Type":"ContainerStarted","Data":"b2b850ca61ee2e625ce7729ba535b009c4bdb415e8c3c53b8f736cc589e86494"} Oct 01 15:05:29 crc kubenswrapper[4869]: I1001 15:05:29.472585 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5zhxq" event={"ID":"3635d59c-a52d-465d-8402-80153cd7369b","Type":"ContainerStarted","Data":"940ffe1670ca371e81078c19ecec12f7298b1df088ba46d33bb5c4b46427acd5"} Oct 01 15:05:29 crc kubenswrapper[4869]: I1001 15:05:29.474194 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5zhxq" Oct 01 15:05:29 crc kubenswrapper[4869]: I1001 15:05:29.475557 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"8960789742e69745f2804a670a94d18b987e861c35500f2baa73dd681881a56d"} Oct 01 15:05:29 crc kubenswrapper[4869]: I1001 15:05:29.480067 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-hxvsn" event={"ID":"2424fe66-6e4a-4dc6-b00e-c5a1b01e3f4a","Type":"ContainerStarted","Data":"8702aa9e11f87c80245596aa5128cd43ab8bbf022669a4cd7d7ba71762ddd5f3"} Oct 01 15:05:29 crc kubenswrapper[4869]: I1001 15:05:29.500183 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-jdrcg" podStartSLOduration=17.500158687 podStartE2EDuration="17.500158687s" podCreationTimestamp="2025-10-01 15:05:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:05:29.49277362 +0000 UTC m=+38.639616736" 
watchObservedRunningTime="2025-10-01 15:05:29.500158687 +0000 UTC m=+38.647001823" Oct 01 15:05:29 crc kubenswrapper[4869]: I1001 15:05:29.526652 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4zhjs\" (UID: \"79b5f958-f252-4703-b785-05b0d01a6e72\") " pod="openshift-image-registry/image-registry-697d97f7c8-4zhjs" Oct 01 15:05:29 crc kubenswrapper[4869]: E1001 15:05:29.529496 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 15:05:30.029485021 +0000 UTC m=+39.176328127 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4zhjs" (UID: "79b5f958-f252-4703-b785-05b0d01a6e72") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:29 crc kubenswrapper[4869]: I1001 15:05:29.629202 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 15:05:29 crc kubenswrapper[4869]: E1001 15:05:29.629870 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 15:05:30.129847634 +0000 UTC m=+39.276690750 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:29 crc kubenswrapper[4869]: I1001 15:05:29.735894 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4zhjs\" (UID: \"79b5f958-f252-4703-b785-05b0d01a6e72\") " pod="openshift-image-registry/image-registry-697d97f7c8-4zhjs" Oct 01 15:05:29 crc kubenswrapper[4869]: E1001 15:05:29.737316 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 15:05:30.237303037 +0000 UTC m=+39.384146153 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4zhjs" (UID: "79b5f958-f252-4703-b785-05b0d01a6e72") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:29 crc kubenswrapper[4869]: I1001 15:05:29.737498 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5zhxq" Oct 01 15:05:29 crc kubenswrapper[4869]: I1001 15:05:29.808569 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-s8l4m" podStartSLOduration=18.808551022 podStartE2EDuration="18.808551022s" podCreationTimestamp="2025-10-01 15:05:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:05:29.805605993 +0000 UTC m=+38.952449119" watchObservedRunningTime="2025-10-01 15:05:29.808551022 +0000 UTC m=+38.955394138" Oct 01 15:05:29 crc kubenswrapper[4869]: I1001 15:05:29.843802 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 15:05:29 crc kubenswrapper[4869]: E1001 15:05:29.844250 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 15:05:30.344233225 +0000 UTC m=+39.491076341 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:29 crc kubenswrapper[4869]: I1001 15:05:29.846072 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-vkxf8" podStartSLOduration=17.846040944 podStartE2EDuration="17.846040944s" podCreationTimestamp="2025-10-01 15:05:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:05:29.843306451 +0000 UTC m=+38.990149577" watchObservedRunningTime="2025-10-01 15:05:29.846040944 +0000 UTC m=+38.992884080" Oct 01 15:05:29 crc kubenswrapper[4869]: I1001 15:05:29.895814 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-vg26t" podStartSLOduration=18.895788404 podStartE2EDuration="18.895788404s" podCreationTimestamp="2025-10-01 15:05:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:05:29.891014066 +0000 UTC m=+39.037857192" watchObservedRunningTime="2025-10-01 15:05:29.895788404 +0000 UTC m=+39.042631520" Oct 01 15:05:29 crc kubenswrapper[4869]: I1001 15:05:29.946269 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4zhjs\" (UID: \"79b5f958-f252-4703-b785-05b0d01a6e72\") " pod="openshift-image-registry/image-registry-697d97f7c8-4zhjs" Oct 01 15:05:29 crc kubenswrapper[4869]: E1001 15:05:29.947247 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 15:05:30.447226079 +0000 UTC m=+39.594069195 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4zhjs" (UID: "79b5f958-f252-4703-b785-05b0d01a6e72") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:29 crc kubenswrapper[4869]: I1001 15:05:29.972324 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-87546"] Oct 01 15:05:29 crc kubenswrapper[4869]: I1001 15:05:29.992547 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5zhxq" podStartSLOduration=17.99252623 podStartE2EDuration="17.99252623s" podCreationTimestamp="2025-10-01 15:05:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:05:29.98990903 +0000 UTC m=+39.136752146" watchObservedRunningTime="2025-10-01 15:05:29.99252623 +0000 UTC m=+39.139369346" Oct 01 15:05:30 crc kubenswrapper[4869]: I1001 15:05:30.024083 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-tgtxf"] Oct 01 15:05:30 crc kubenswrapper[4869]: I1001 15:05:30.034113 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-hjpxg"] Oct 01 15:05:30 crc kubenswrapper[4869]: I1001 15:05:30.047741 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 15:05:30 crc kubenswrapper[4869]: E1001 15:05:30.048105 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 15:05:30.548089345 +0000 UTC m=+39.694932461 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:30 crc kubenswrapper[4869]: W1001 15:05:30.093918 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podde7935aa_128a_418f_bf83_2b9aabc146ed.slice/crio-d7a4598df166ff08d56937bc402b056cf8f3eb76eaa07f01a59f69b593ca8fe5 WatchSource:0}: Error finding container d7a4598df166ff08d56937bc402b056cf8f3eb76eaa07f01a59f69b593ca8fe5: Status 404 returned error can't find the container with id d7a4598df166ff08d56937bc402b056cf8f3eb76eaa07f01a59f69b593ca8fe5 Oct 01 15:05:30 crc kubenswrapper[4869]: I1001 15:05:30.112335 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-bb9hc"] Oct 01 15:05:30 crc kubenswrapper[4869]: I1001 15:05:30.120145 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-qp4hv" podStartSLOduration=18.120120501 podStartE2EDuration="18.120120501s" podCreationTimestamp="2025-10-01 15:05:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:05:30.029356975 +0000 UTC m=+39.176200081" watchObservedRunningTime="2025-10-01 15:05:30.120120501 +0000 UTC m=+39.266963617" Oct 01 15:05:30 crc kubenswrapper[4869]: I1001 15:05:30.128520 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-j7g7t" podStartSLOduration=19.128505465 podStartE2EDuration="19.128505465s" podCreationTimestamp="2025-10-01 15:05:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:05:30.048693151 +0000 UTC m=+39.195536287" watchObservedRunningTime="2025-10-01 15:05:30.128505465 +0000 UTC m=+39.275348581" Oct 01 15:05:30 crc kubenswrapper[4869]: I1001 15:05:30.132867 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-jv4xs"] Oct 01 15:05:30 crc kubenswrapper[4869]: I1001 15:05:30.135615 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-dxcl2" podStartSLOduration=19.135597005 podStartE2EDuration="19.135597005s" podCreationTimestamp="2025-10-01 15:05:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:05:30.072705283 +0000 UTC m=+39.219548409" watchObservedRunningTime="2025-10-01 15:05:30.135597005 +0000 UTC m=+39.282440121" Oct 01 15:05:30 crc kubenswrapper[4869]: I1001 15:05:30.148013 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-l5vnp"] Oct 01 15:05:30 crc kubenswrapper[4869]: I1001 15:05:30.148848 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4zhjs\" (UID: \"79b5f958-f252-4703-b785-05b0d01a6e72\") " pod="openshift-image-registry/image-registry-697d97f7c8-4zhjs" Oct 01 15:05:30 crc kubenswrapper[4869]: E1001 15:05:30.149174 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 15:05:30.649161447 +0000 UTC m=+39.796004563 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4zhjs" (UID: "79b5f958-f252-4703-b785-05b0d01a6e72") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:30 crc kubenswrapper[4869]: I1001 15:05:30.154099 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-jds44"] Oct 01 15:05:30 crc kubenswrapper[4869]: I1001 15:05:30.157863 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-hxvsn" podStartSLOduration=19.157845349 podStartE2EDuration="19.157845349s" podCreationTimestamp="2025-10-01 15:05:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:05:30.132749259 +0000 UTC m=+39.279592385" watchObservedRunningTime="2025-10-01 15:05:30.157845349 +0000 UTC m=+39.304688475" Oct 01 15:05:30 crc kubenswrapper[4869]: I1001 15:05:30.165610 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-w5ngh" podStartSLOduration=6.165597287 podStartE2EDuration="6.165597287s" podCreationTimestamp="2025-10-01 15:05:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:05:30.154075689 +0000 UTC m=+39.300918805" watchObservedRunningTime="2025-10-01 15:05:30.165597287 +0000 UTC m=+39.312440403" Oct 01 15:05:30 crc kubenswrapper[4869]: I1001 15:05:30.197484 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-xpzff"] Oct 01 15:05:30 crc kubenswrapper[4869]: I1001 15:05:30.259861 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-klj2b"] Oct 01 15:05:30 crc kubenswrapper[4869]: I1001 15:05:30.260323 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 15:05:30 crc kubenswrapper[4869]: E1001 15:05:30.260471 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b 
nodeName:}" failed. No retries permitted until 2025-10-01 15:05:30.760444072 +0000 UTC m=+39.907287188 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:30 crc kubenswrapper[4869]: I1001 15:05:30.260517 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/328ce213-12ef-40af-a41f-e0079949b82d-metrics-certs\") pod \"network-metrics-daemon-rz7qp\" (UID: \"328ce213-12ef-40af-a41f-e0079949b82d\") " pod="openshift-multus/network-metrics-daemon-rz7qp" Oct 01 15:05:30 crc kubenswrapper[4869]: I1001 15:05:30.260574 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4zhjs\" (UID: \"79b5f958-f252-4703-b785-05b0d01a6e72\") " pod="openshift-image-registry/image-registry-697d97f7c8-4zhjs" Oct 01 15:05:30 crc kubenswrapper[4869]: E1001 15:05:30.260840 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 15:05:30.760827593 +0000 UTC m=+39.907670709 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4zhjs" (UID: "79b5f958-f252-4703-b785-05b0d01a6e72") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:30 crc kubenswrapper[4869]: I1001 15:05:30.272404 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/328ce213-12ef-40af-a41f-e0079949b82d-metrics-certs\") pod \"network-metrics-daemon-rz7qp\" (UID: \"328ce213-12ef-40af-a41f-e0079949b82d\") " pod="openshift-multus/network-metrics-daemon-rz7qp" Oct 01 15:05:30 crc kubenswrapper[4869]: I1001 15:05:30.287137 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-9pwb5"] Oct 01 15:05:30 crc kubenswrapper[4869]: I1001 15:05:30.287886 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-nk6rv"] Oct 01 15:05:30 crc kubenswrapper[4869]: I1001 15:05:30.291297 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-qd5ft"] Oct 01 15:05:30 crc kubenswrapper[4869]: I1001 15:05:30.303818 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-jlnv5"] Oct 01 15:05:30 crc kubenswrapper[4869]: I1001 15:05:30.305953 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-zb5c7"] Oct 01 15:05:30 crc kubenswrapper[4869]: I1001 15:05:30.310623 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29322180-8fxq6"] Oct 01 15:05:30 crc kubenswrapper[4869]: I1001 15:05:30.329652 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-gdgpp"] Oct 01 15:05:30 crc kubenswrapper[4869]: W1001 15:05:30.359820 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod45ac9df8_4d47_44e7_a1bb_bd4d0b8fd560.slice/crio-8b502920ada9f7b10626823383b198785992c39463ec517cb262ae4d2cd321ee WatchSource:0}: Error finding container 8b502920ada9f7b10626823383b198785992c39463ec517cb262ae4d2cd321ee: Status 404 returned error can't find the container with id 8b502920ada9f7b10626823383b198785992c39463ec517cb262ae4d2cd321ee Oct 01 15:05:30 crc kubenswrapper[4869]: W1001 15:05:30.360242 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4c1dcd57_cee2_45ab_9a92_8cdd8b864f98.slice/crio-163bb8225372db29e4028d11979bd2f8a8acf2df2fe78c9ab2504e3a5f3cd6bc WatchSource:0}: Error finding container 163bb8225372db29e4028d11979bd2f8a8acf2df2fe78c9ab2504e3a5f3cd6bc: Status 404 returned error can't find the container with id 163bb8225372db29e4028d11979bd2f8a8acf2df2fe78c9ab2504e3a5f3cd6bc Oct 01 15:05:30 crc kubenswrapper[4869]: I1001 15:05:30.370719 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: 
\"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 15:05:30 crc kubenswrapper[4869]: E1001 15:05:30.371026 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 15:05:30.871007418 +0000 UTC m=+40.017850534 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:30 crc kubenswrapper[4869]: I1001 15:05:30.374851 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wht8b"] Oct 01 15:05:30 crc kubenswrapper[4869]: I1001 15:05:30.404978 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-vjs4q"] Oct 01 15:05:30 crc kubenswrapper[4869]: I1001 15:05:30.411845 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-t5kll"] Oct 01 15:05:30 crc kubenswrapper[4869]: W1001 15:05:30.445588 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod906a4a45_c846_4d80_a3ec_10070aae888c.slice/crio-1b5f042e36a79419e5df465225a64ef8de492a52f5f47feff0ac3f1eed25998a WatchSource:0}: Error finding container 1b5f042e36a79419e5df465225a64ef8de492a52f5f47feff0ac3f1eed25998a: Status 404 returned error can't find the container with id 1b5f042e36a79419e5df465225a64ef8de492a52f5f47feff0ac3f1eed25998a Oct 01 15:05:30 crc kubenswrapper[4869]: I1001 15:05:30.449311 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-hnx9p"] Oct 01 15:05:30 crc kubenswrapper[4869]: I1001 15:05:30.453686 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rz7qp" Oct 01 15:05:30 crc kubenswrapper[4869]: I1001 15:05:30.472681 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4zhjs\" (UID: \"79b5f958-f252-4703-b785-05b0d01a6e72\") " pod="openshift-image-registry/image-registry-697d97f7c8-4zhjs" Oct 01 15:05:30 crc kubenswrapper[4869]: E1001 15:05:30.473095 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 15:05:30.973073877 +0000 UTC m=+40.119916993 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4zhjs" (UID: "79b5f958-f252-4703-b785-05b0d01a6e72") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:30 crc kubenswrapper[4869]: W1001 15:05:30.497429 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod42cd6256_ae7b_46c8_a910_0d8723fa9351.slice/crio-277cd50a7977aa0fb63617ad2f9f933e1473d0f0e3f7050b90a7a1d51ee35cbd WatchSource:0}: Error finding container 277cd50a7977aa0fb63617ad2f9f933e1473d0f0e3f7050b90a7a1d51ee35cbd: Status 404 returned error can't find the container with id 277cd50a7977aa0fb63617ad2f9f933e1473d0f0e3f7050b90a7a1d51ee35cbd Oct 01 15:05:30 crc kubenswrapper[4869]: I1001 15:05:30.587821 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-hxvsn" Oct 01 15:05:30 crc kubenswrapper[4869]: I1001 15:05:30.587857 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-hxvsn" Oct 01 15:05:30 crc kubenswrapper[4869]: I1001 15:05:30.587982 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-jv4xs" event={"ID":"4636576a-d3da-4491-a146-a6ffe6382a06","Type":"ContainerStarted","Data":"a776cce11b77c47670f2f274c2d6684e3a24e1354fe9cc6baba7884371fbbbfa"} Oct 01 15:05:30 crc kubenswrapper[4869]: I1001 15:05:30.589370 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 15:05:30 crc kubenswrapper[4869]: E1001 15:05:30.589938 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 15:05:31.08988706 +0000 UTC m=+40.236730176 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:30 crc kubenswrapper[4869]: I1001 15:05:30.591252 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4zhjs\" (UID: \"79b5f958-f252-4703-b785-05b0d01a6e72\") " pod="openshift-image-registry/image-registry-697d97f7c8-4zhjs" Oct 01 15:05:30 crc kubenswrapper[4869]: E1001 15:05:30.593010 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 15:05:31.092989113 +0000 UTC m=+40.239832229 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4zhjs" (UID: "79b5f958-f252-4703-b785-05b0d01a6e72") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:30 crc kubenswrapper[4869]: I1001 15:05:30.601458 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gdgpp" event={"ID":"906a4a45-c846-4d80-a3ec-10070aae888c","Type":"ContainerStarted","Data":"1b5f042e36a79419e5df465225a64ef8de492a52f5f47feff0ac3f1eed25998a"} Oct 01 15:05:30 crc kubenswrapper[4869]: I1001 15:05:30.605301 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-8mczm" event={"ID":"6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a","Type":"ContainerStarted","Data":"afb576a00a14d022036fb4e7d4e78b0b6487da741f958ca836ccc7510556a41b"} Oct 01 15:05:30 crc kubenswrapper[4869]: I1001 15:05:30.606846 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-8mczm" Oct 01 15:05:30 crc kubenswrapper[4869]: I1001 15:05:30.608680 4869 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-8mczm container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.31:6443/healthz\": dial tcp 10.217.0.31:6443: connect: connection refused" start-of-body= Oct 01 15:05:30 crc kubenswrapper[4869]: I1001 15:05:30.608709 4869 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-8mczm" podUID="6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.31:6443/healthz\": dial tcp 10.217.0.31:6443: connect: connection refused" Oct 01 15:05:30 crc kubenswrapper[4869]: I1001 15:05:30.620725 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-9pwb5" 
event={"ID":"45ac9df8-4d47-44e7-a1bb-bd4d0b8fd560","Type":"ContainerStarted","Data":"8b502920ada9f7b10626823383b198785992c39463ec517cb262ae4d2cd321ee"} Oct 01 15:05:30 crc kubenswrapper[4869]: I1001 15:05:30.624770 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-v6xf7" event={"ID":"0e4f6d72-0c4a-40db-adc3-5b113c7bc499","Type":"ContainerStarted","Data":"cb1be2f92ec409bb36225bf64752ed7dcc6f632e8064be7cfad4b79ef27450a5"} Oct 01 15:05:30 crc kubenswrapper[4869]: I1001 15:05:30.627229 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-klj2b" event={"ID":"4ac58a76-114b-4e86-924c-ad1f269b36a8","Type":"ContainerStarted","Data":"c028ece45389eed598830007bd34f528a562f8fa2a743e965f8852b32f461e2f"} Oct 01 15:05:30 crc kubenswrapper[4869]: I1001 15:05:30.629414 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-xpzff" event={"ID":"62ecb2b0-55b3-4344-8237-705ef292ef63","Type":"ContainerStarted","Data":"e4a846f4f69228f3b741598f2a4dafd43d8106589ed47f2498b62ea1c641c7cd"} Oct 01 15:05:30 crc kubenswrapper[4869]: I1001 15:05:30.639720 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-sj2cx" event={"ID":"e544db96-88d4-4638-b95a-de0e7c17e2d9","Type":"ContainerStarted","Data":"cf05f95111ca151037afc848457f8d81f7e535a9346fb13b82552cd5ab1188ab"} Oct 01 15:05:30 crc kubenswrapper[4869]: I1001 15:05:30.641530 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-8mczm" podStartSLOduration=19.64151534 podStartE2EDuration="19.64151534s" podCreationTimestamp="2025-10-01 15:05:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:05:30.639936608 +0000 UTC m=+39.786779714" watchObservedRunningTime="2025-10-01 15:05:30.64151534 +0000 UTC m=+39.788358456" Oct 01 15:05:30 crc kubenswrapper[4869]: I1001 15:05:30.683751 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-sj2cx" podStartSLOduration=19.683736019 podStartE2EDuration="19.683736019s" podCreationTimestamp="2025-10-01 15:05:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:05:30.681762036 +0000 UTC m=+39.828605152" watchObservedRunningTime="2025-10-01 15:05:30.683736019 +0000 UTC m=+39.830579125" Oct 01 15:05:30 crc kubenswrapper[4869]: I1001 15:05:30.694392 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-bb9hc" event={"ID":"3968ac06-9cc3-4dc8-827f-aff3261f131f","Type":"ContainerStarted","Data":"e32cffaea709e07af6c58fde137a73dc3c59f4a4473f9e9e8043820eae2132b1"} Oct 01 15:05:30 crc kubenswrapper[4869]: I1001 15:05:30.695208 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 15:05:30 
crc kubenswrapper[4869]: E1001 15:05:30.696383 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 15:05:31.196366906 +0000 UTC m=+40.343210022 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:30 crc kubenswrapper[4869]: I1001 15:05:30.720131 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-sm2sz" event={"ID":"58c072ca-9e0a-418d-90ba-ea33213d42c7","Type":"ContainerStarted","Data":"9898c3f889fbaec70345c7cb9d578f731554137fe39b9043f578e70eb34d85bc"} Oct 01 15:05:30 crc kubenswrapper[4869]: I1001 15:05:30.722028 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-sm2sz" Oct 01 15:05:30 crc kubenswrapper[4869]: I1001 15:05:30.725606 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-v6xf7" podStartSLOduration=6.725565287 podStartE2EDuration="6.725565287s" podCreationTimestamp="2025-10-01 15:05:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:05:30.719776342 +0000 UTC m=+39.866619468" watchObservedRunningTime="2025-10-01 15:05:30.725565287 +0000 UTC m=+39.872408413" Oct 01 15:05:30 crc kubenswrapper[4869]: I1001 15:05:30.726810 4869 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-sm2sz container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.19:8443/healthz\": dial tcp 10.217.0.19:8443: connect: connection refused" start-of-body= Oct 01 15:05:30 crc kubenswrapper[4869]: I1001 15:05:30.727430 4869 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-sm2sz" podUID="58c072ca-9e0a-418d-90ba-ea33213d42c7" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.19:8443/healthz\": dial tcp 10.217.0.19:8443: connect: connection refused" Oct 01 15:05:30 crc kubenswrapper[4869]: I1001 15:05:30.765447 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pdvwx" Oct 01 15:05:30 crc kubenswrapper[4869]: I1001 15:05:30.765476 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pdvwx" Oct 01 15:05:30 crc kubenswrapper[4869]: I1001 15:05:30.769563 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/cni-sysctl-allowlist-ds-xffhm" event={"ID":"9ff567e5-6f6b-4b25-a8a0-3aa792f4291d","Type":"ContainerStarted","Data":"44e089bc1369f6c1a4bcd915248cba1eac03bcf6058f0b48a934fd3e6a73f37a"} Oct 01 15:05:30 crc kubenswrapper[4869]: I1001 15:05:30.770627 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-multus/cni-sysctl-allowlist-ds-xffhm" Oct 01 15:05:30 crc kubenswrapper[4869]: I1001 15:05:30.786569 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-sm2sz" podStartSLOduration=18.786509276 podStartE2EDuration="18.786509276s" podCreationTimestamp="2025-10-01 15:05:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:05:30.755014404 +0000 UTC m=+39.901857510" watchObservedRunningTime="2025-10-01 15:05:30.786509276 +0000 UTC m=+39.933352392" Oct 01 15:05:30 crc kubenswrapper[4869]: I1001 15:05:30.787540 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/cni-sysctl-allowlist-ds-xffhm" podStartSLOduration=6.787535734 podStartE2EDuration="6.787535734s" podCreationTimestamp="2025-10-01 15:05:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:05:30.78738793 +0000 UTC m=+39.934231046" watchObservedRunningTime="2025-10-01 15:05:30.787535734 +0000 UTC m=+39.934378850" Oct 01 15:05:30 crc kubenswrapper[4869]: I1001 15:05:30.793162 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pdvwx" Oct 01 15:05:30 crc kubenswrapper[4869]: I1001 15:05:30.798660 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4zhjs\" (UID: \"79b5f958-f252-4703-b785-05b0d01a6e72\") " pod="openshift-image-registry/image-registry-697d97f7c8-4zhjs" Oct 01 15:05:30 crc kubenswrapper[4869]: E1001 15:05:30.800991 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 15:05:31.300978313 +0000 UTC m=+40.447821429 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4zhjs" (UID: "79b5f958-f252-4703-b785-05b0d01a6e72") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:30 crc kubenswrapper[4869]: I1001 15:05:30.812218 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-jlnv5" event={"ID":"fa4dadea-9e0e-431f-b6a7-057e5cec4f9f","Type":"ContainerStarted","Data":"e04808982c37c701b3c58a537b6e62337556fcf0ae3402cf3b1dcd4cdacb1182"} Oct 01 15:05:30 crc kubenswrapper[4869]: I1001 15:05:30.816787 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"a7f20ad9ac3aaa1fe9cf120b8cecd897339bbfc409a2a36866b7cd1c209c29ab"} Oct 01 15:05:30 crc kubenswrapper[4869]: I1001 15:05:30.828954 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-mdc2f" event={"ID":"a4dfc6a1-b623-46cb-b1ff-c6b809e8deaa","Type":"ContainerStarted","Data":"394e067f0925c8f5ddc6177261b59597e4209731909a5cb7ea0ab514ff722066"} Oct 01 15:05:30 crc kubenswrapper[4869]: I1001 15:05:30.882723 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-nk6rv" event={"ID":"60f75bf5-448e-4770-8eba-26c271028f50","Type":"ContainerStarted","Data":"b240f91ba008ab5e4a3a94f54a41c8e94dbb26e77055e043bc9213d3c5ea2cd3"} Oct 01 15:05:30 crc kubenswrapper[4869]: I1001 15:05:30.884905 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-multus/cni-sysctl-allowlist-ds-xffhm" Oct 01 15:05:30 crc kubenswrapper[4869]: I1001 15:05:30.888973 4869 patch_prober.go:28] interesting pod/apiserver-76f77b778f-hxvsn container/openshift-apiserver namespace/openshift-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok Oct 01 15:05:30 crc kubenswrapper[4869]: [+]log ok Oct 01 15:05:30 crc kubenswrapper[4869]: [+]etcd ok Oct 01 15:05:30 crc kubenswrapper[4869]: [+]poststarthook/start-apiserver-admission-initializer ok Oct 01 15:05:30 crc kubenswrapper[4869]: [+]poststarthook/generic-apiserver-start-informers ok Oct 01 15:05:30 crc kubenswrapper[4869]: [-]poststarthook/max-in-flight-filter failed: reason withheld Oct 01 15:05:30 crc kubenswrapper[4869]: [-]poststarthook/storage-object-count-tracker-hook failed: reason withheld Oct 01 15:05:30 crc kubenswrapper[4869]: [-]poststarthook/image.openshift.io-apiserver-caches failed: reason withheld Oct 01 15:05:30 crc kubenswrapper[4869]: [-]poststarthook/authorization.openshift.io-bootstrapclusterroles failed: reason withheld Oct 01 15:05:30 crc kubenswrapper[4869]: [-]poststarthook/authorization.openshift.io-ensurenodebootstrap-sa failed: reason withheld Oct 01 15:05:30 crc kubenswrapper[4869]: [+]poststarthook/project.openshift.io-projectcache ok Oct 01 15:05:30 crc kubenswrapper[4869]: [+]poststarthook/project.openshift.io-projectauthorizationcache ok Oct 01 15:05:30 crc kubenswrapper[4869]: [+]poststarthook/openshift.io-startinformers ok Oct 01 15:05:30 crc kubenswrapper[4869]: [+]poststarthook/openshift.io-restmapperupdater ok Oct 01 
15:05:30 crc kubenswrapper[4869]: [+]poststarthook/quota.openshift.io-clusterquotamapping ok Oct 01 15:05:30 crc kubenswrapper[4869]: livez check failed Oct 01 15:05:30 crc kubenswrapper[4869]: I1001 15:05:30.889069 4869 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-apiserver/apiserver-76f77b778f-hxvsn" podUID="2424fe66-6e4a-4dc6-b00e-c5a1b01e3f4a" containerName="openshift-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 01 15:05:30 crc kubenswrapper[4869]: I1001 15:05:30.899427 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 15:05:30 crc kubenswrapper[4869]: I1001 15:05:30.899838 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-mdc2f" podStartSLOduration=19.899819635 podStartE2EDuration="19.899819635s" podCreationTimestamp="2025-10-01 15:05:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:05:30.891416261 +0000 UTC m=+40.038259387" watchObservedRunningTime="2025-10-01 15:05:30.899819635 +0000 UTC m=+40.046662751" Oct 01 15:05:30 crc kubenswrapper[4869]: E1001 15:05:30.901031 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 15:05:31.401002387 +0000 UTC m=+40.547845503 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:30 crc kubenswrapper[4869]: I1001 15:05:30.915540 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-zb5c7" event={"ID":"5067e051-5c99-4fe9-b4f2-e5dfa8b1c9b4","Type":"ContainerStarted","Data":"f34151e7c1594938de0e954fc983089c5105f5dc9354c0c339ef517d6712190a"} Oct 01 15:05:30 crc kubenswrapper[4869]: I1001 15:05:30.926886 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29322180-8fxq6" event={"ID":"4c1dcd57-cee2-45ab-9a92-8cdd8b864f98","Type":"ContainerStarted","Data":"163bb8225372db29e4028d11979bd2f8a8acf2df2fe78c9ab2504e3a5f3cd6bc"} Oct 01 15:05:30 crc kubenswrapper[4869]: I1001 15:05:30.955516 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-t5kll" event={"ID":"2d195bae-5172-4387-869b-086f215963ff","Type":"ContainerStarted","Data":"c941b6d52eb8b5a42fa91ad19317a29a2027c4fa6f57538ec5a3e9334c13b07c"} Oct 01 15:05:30 crc kubenswrapper[4869]: I1001 15:05:30.964721 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-tgtxf" event={"ID":"bd217fac-bac4-44af-8942-0385d47f21d2","Type":"ContainerStarted","Data":"3f853c269c6f5cdba7c3014e228ce9db16169cb39c61d3f4e61fd34504202345"} Oct 01 15:05:30 crc kubenswrapper[4869]: I1001 15:05:30.970002 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"a77b1d372a3ea4329ff1c3ca79e8046e3fbe981fb13d5f6c1f533168a0d87668"} Oct 01 15:05:30 crc kubenswrapper[4869]: I1001 15:05:30.970079 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 15:05:30 crc kubenswrapper[4869]: I1001 15:05:30.973956 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"5e506e14d8a691329ffa957eaa6a71feda35c4244bae46a3a38f1f6d9c94c518"} Oct 01 15:05:30 crc kubenswrapper[4869]: I1001 15:05:30.977723 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-hjpxg" event={"ID":"de7935aa-128a-418f-bf83-2b9aabc146ed","Type":"ContainerStarted","Data":"d7a4598df166ff08d56937bc402b056cf8f3eb76eaa07f01a59f69b593ca8fe5"} Oct 01 15:05:31 crc kubenswrapper[4869]: I1001 15:05:31.014593 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-l5vnp" event={"ID":"9a717f28-31d3-4239-a315-97236362d5cb","Type":"ContainerStarted","Data":"eee533672cd7be9040de8ae992db93b7610d1a307a5d0d6fb4280e8a77e6c923"} Oct 01 15:05:31 crc kubenswrapper[4869]: I1001 15:05:31.015568 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4zhjs\" (UID: \"79b5f958-f252-4703-b785-05b0d01a6e72\") " pod="openshift-image-registry/image-registry-697d97f7c8-4zhjs" Oct 01 15:05:31 crc kubenswrapper[4869]: E1001 15:05:31.015926 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 15:05:31.515914189 +0000 UTC m=+40.662757305 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4zhjs" (UID: "79b5f958-f252-4703-b785-05b0d01a6e72") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:31 crc kubenswrapper[4869]: I1001 15:05:31.057142 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-rz7qp"] Oct 01 15:05:31 crc kubenswrapper[4869]: I1001 15:05:31.085338 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-jds44" event={"ID":"f4295e29-ac36-4236-b679-4cd87ea76347","Type":"ContainerStarted","Data":"9945c84f0c790d45105944673e6aadd871471a36f6c77e57a6836ef024a5fc4b"} Oct 01 15:05:31 crc kubenswrapper[4869]: I1001 15:05:31.118105 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-jds44" podStartSLOduration=20.11808488 podStartE2EDuration="20.11808488s" podCreationTimestamp="2025-10-01 15:05:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:05:31.110467567 +0000 UTC m=+40.257310693" watchObservedRunningTime="2025-10-01 15:05:31.11808488 +0000 UTC m=+40.264927996" Oct 01 15:05:31 crc kubenswrapper[4869]: I1001 15:05:31.118477 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 15:05:31 crc kubenswrapper[4869]: E1001 15:05:31.119251 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 15:05:31.619216681 +0000 UTC m=+40.766059797 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:31 crc kubenswrapper[4869]: I1001 15:05:31.132226 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-87546" event={"ID":"4a5e3470-0f08-44e6-8d4c-f968950c75f2","Type":"ContainerStarted","Data":"6144723c27e2378636ac27a4acba612c38e45a311faee95c72709a5f5247ea07"} Oct 01 15:05:31 crc kubenswrapper[4869]: I1001 15:05:31.156385 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-qd5ft" event={"ID":"9338ac98-e76c-4c74-af46-b646ab637e37","Type":"ContainerStarted","Data":"da7b0bbfff1b004ac481dd0b70317434c793b0a2dffe4947fa38ba906b1fa5bb"} Oct 01 15:05:31 crc kubenswrapper[4869]: I1001 15:05:31.165165 4869 generic.go:334] "Generic (PLEG): container finished" podID="2f6f6869-bd98-4c21-a184-a3b7bd5f0b5b" containerID="0301fe0e5510e84683d6a2aaf424ad3ce8fb3ddecae3fdf848ae64bf360aa02f" exitCode=0 Oct 01 15:05:31 crc kubenswrapper[4869]: I1001 15:05:31.166899 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-gqrbk" event={"ID":"2f6f6869-bd98-4c21-a184-a3b7bd5f0b5b","Type":"ContainerDied","Data":"0301fe0e5510e84683d6a2aaf424ad3ce8fb3ddecae3fdf848ae64bf360aa02f"} Oct 01 15:05:31 crc kubenswrapper[4869]: I1001 15:05:31.171426 4869 patch_prober.go:28] interesting pod/downloads-7954f5f757-dxcl2 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" start-of-body= Oct 01 15:05:31 crc kubenswrapper[4869]: I1001 15:05:31.171731 4869 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-dxcl2" podUID="2d8f8fae-d727-4763-bb02-0a74320ba8c4" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" Oct 01 15:05:31 crc kubenswrapper[4869]: I1001 15:05:31.177020 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pdvwx" Oct 01 15:05:31 crc kubenswrapper[4869]: I1001 15:05:31.179013 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-vg26t" Oct 01 15:05:31 crc kubenswrapper[4869]: I1001 15:05:31.186115 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-qp4hv" Oct 01 15:05:31 crc kubenswrapper[4869]: W1001 15:05:31.209634 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod328ce213_12ef_40af_a41f_e0079949b82d.slice/crio-0af5d7d2e731c6e4f8893bb89974aacea4efced0174ea205551ecc94007e5407 WatchSource:0}: Error finding container 0af5d7d2e731c6e4f8893bb89974aacea4efced0174ea205551ecc94007e5407: Status 404 returned error can't find the container with id 0af5d7d2e731c6e4f8893bb89974aacea4efced0174ea205551ecc94007e5407 
Oct 01 15:05:31 crc kubenswrapper[4869]: I1001 15:05:31.235085 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4zhjs\" (UID: \"79b5f958-f252-4703-b785-05b0d01a6e72\") " pod="openshift-image-registry/image-registry-697d97f7c8-4zhjs" Oct 01 15:05:31 crc kubenswrapper[4869]: E1001 15:05:31.241754 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 15:05:31.741737386 +0000 UTC m=+40.888580502 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4zhjs" (UID: "79b5f958-f252-4703-b785-05b0d01a6e72") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:31 crc kubenswrapper[4869]: I1001 15:05:31.351542 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 15:05:31 crc kubenswrapper[4869]: E1001 15:05:31.351937 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 15:05:31.851919062 +0000 UTC m=+40.998762178 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:31 crc kubenswrapper[4869]: I1001 15:05:31.455584 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4zhjs\" (UID: \"79b5f958-f252-4703-b785-05b0d01a6e72\") " pod="openshift-image-registry/image-registry-697d97f7c8-4zhjs" Oct 01 15:05:31 crc kubenswrapper[4869]: E1001 15:05:31.455959 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 15:05:31.955944833 +0000 UTC m=+41.102787949 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4zhjs" (UID: "79b5f958-f252-4703-b785-05b0d01a6e72") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:31 crc kubenswrapper[4869]: I1001 15:05:31.556842 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 15:05:31 crc kubenswrapper[4869]: E1001 15:05:31.558409 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 15:05:32.058392241 +0000 UTC m=+41.205235357 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:31 crc kubenswrapper[4869]: I1001 15:05:31.572467 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-multus/cni-sysctl-allowlist-ds-xffhm"] Oct 01 15:05:31 crc kubenswrapper[4869]: I1001 15:05:31.659935 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4zhjs\" (UID: \"79b5f958-f252-4703-b785-05b0d01a6e72\") " pod="openshift-image-registry/image-registry-697d97f7c8-4zhjs" Oct 01 15:05:31 crc kubenswrapper[4869]: E1001 15:05:31.660332 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 15:05:32.160316996 +0000 UTC m=+41.307160112 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4zhjs" (UID: "79b5f958-f252-4703-b785-05b0d01a6e72") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:31 crc kubenswrapper[4869]: I1001 15:05:31.761002 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 15:05:31 crc kubenswrapper[4869]: E1001 15:05:31.761297 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 15:05:32.261250635 +0000 UTC m=+41.408093751 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:31 crc kubenswrapper[4869]: I1001 15:05:31.761568 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4zhjs\" (UID: \"79b5f958-f252-4703-b785-05b0d01a6e72\") " pod="openshift-image-registry/image-registry-697d97f7c8-4zhjs" Oct 01 15:05:31 crc kubenswrapper[4869]: E1001 15:05:31.761854 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 15:05:32.261843671 +0000 UTC m=+41.408686787 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4zhjs" (UID: "79b5f958-f252-4703-b785-05b0d01a6e72") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:31 crc kubenswrapper[4869]: I1001 15:05:31.775790 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-mdc2f" Oct 01 15:05:31 crc kubenswrapper[4869]: I1001 15:05:31.782153 4869 patch_prober.go:28] interesting pod/router-default-5444994796-mdc2f container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 01 15:05:31 crc kubenswrapper[4869]: [-]has-synced failed: reason withheld Oct 01 15:05:31 crc kubenswrapper[4869]: [+]process-running ok Oct 01 15:05:31 crc kubenswrapper[4869]: healthz check failed Oct 01 15:05:31 crc kubenswrapper[4869]: I1001 15:05:31.782187 4869 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-mdc2f" podUID="a4dfc6a1-b623-46cb-b1ff-c6b809e8deaa" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 01 15:05:31 crc kubenswrapper[4869]: I1001 15:05:31.862188 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 15:05:31 crc kubenswrapper[4869]: E1001 15:05:31.863172 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 15:05:32.363152809 +0000 UTC m=+41.509995925 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:31 crc kubenswrapper[4869]: I1001 15:05:31.872728 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-qf2zt"] Oct 01 15:05:31 crc kubenswrapper[4869]: I1001 15:05:31.874017 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-qf2zt" Oct 01 15:05:31 crc kubenswrapper[4869]: I1001 15:05:31.879289 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Oct 01 15:05:31 crc kubenswrapper[4869]: I1001 15:05:31.911569 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-qf2zt"] Oct 01 15:05:31 crc kubenswrapper[4869]: I1001 15:05:31.964677 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/87c849d7-d613-446d-9f2f-bdcf6da7e4e6-utilities\") pod \"community-operators-qf2zt\" (UID: \"87c849d7-d613-446d-9f2f-bdcf6da7e4e6\") " pod="openshift-marketplace/community-operators-qf2zt" Oct 01 15:05:31 crc kubenswrapper[4869]: I1001 15:05:31.965172 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gb7pd\" (UniqueName: \"kubernetes.io/projected/87c849d7-d613-446d-9f2f-bdcf6da7e4e6-kube-api-access-gb7pd\") pod \"community-operators-qf2zt\" (UID: \"87c849d7-d613-446d-9f2f-bdcf6da7e4e6\") " pod="openshift-marketplace/community-operators-qf2zt" Oct 01 15:05:31 crc kubenswrapper[4869]: I1001 15:05:31.965285 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4zhjs\" (UID: \"79b5f958-f252-4703-b785-05b0d01a6e72\") " pod="openshift-image-registry/image-registry-697d97f7c8-4zhjs" Oct 01 15:05:31 crc kubenswrapper[4869]: I1001 15:05:31.965379 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/87c849d7-d613-446d-9f2f-bdcf6da7e4e6-catalog-content\") pod \"community-operators-qf2zt\" (UID: \"87c849d7-d613-446d-9f2f-bdcf6da7e4e6\") " pod="openshift-marketplace/community-operators-qf2zt" Oct 01 15:05:31 crc kubenswrapper[4869]: E1001 15:05:31.965796 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 15:05:32.465782943 +0000 UTC m=+41.612626059 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4zhjs" (UID: "79b5f958-f252-4703-b785-05b0d01a6e72") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.065977 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-262k2"] Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.066182 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 15:05:32 crc kubenswrapper[4869]: E1001 15:05:32.066240 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 15:05:32.566225778 +0000 UTC m=+41.713068894 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.068129 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/87c849d7-d613-446d-9f2f-bdcf6da7e4e6-catalog-content\") pod \"community-operators-qf2zt\" (UID: \"87c849d7-d613-446d-9f2f-bdcf6da7e4e6\") " pod="openshift-marketplace/community-operators-qf2zt" Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.068282 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/87c849d7-d613-446d-9f2f-bdcf6da7e4e6-utilities\") pod \"community-operators-qf2zt\" (UID: \"87c849d7-d613-446d-9f2f-bdcf6da7e4e6\") " pod="openshift-marketplace/community-operators-qf2zt" Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.068365 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gb7pd\" (UniqueName: \"kubernetes.io/projected/87c849d7-d613-446d-9f2f-bdcf6da7e4e6-kube-api-access-gb7pd\") pod \"community-operators-qf2zt\" (UID: \"87c849d7-d613-446d-9f2f-bdcf6da7e4e6\") " pod="openshift-marketplace/community-operators-qf2zt" Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.068407 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4zhjs\" (UID: \"79b5f958-f252-4703-b785-05b0d01a6e72\") " pod="openshift-image-registry/image-registry-697d97f7c8-4zhjs" Oct 01 15:05:32 crc kubenswrapper[4869]: 
I1001 15:05:32.068780 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/87c849d7-d613-446d-9f2f-bdcf6da7e4e6-catalog-content\") pod \"community-operators-qf2zt\" (UID: \"87c849d7-d613-446d-9f2f-bdcf6da7e4e6\") " pod="openshift-marketplace/community-operators-qf2zt" Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.069017 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/87c849d7-d613-446d-9f2f-bdcf6da7e4e6-utilities\") pod \"community-operators-qf2zt\" (UID: \"87c849d7-d613-446d-9f2f-bdcf6da7e4e6\") " pod="openshift-marketplace/community-operators-qf2zt" Oct 01 15:05:32 crc kubenswrapper[4869]: E1001 15:05:32.070032 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 15:05:32.570017469 +0000 UTC m=+41.716860585 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4zhjs" (UID: "79b5f958-f252-4703-b785-05b0d01a6e72") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.071047 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-262k2" Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.084793 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.088184 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-262k2"] Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.116089 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gb7pd\" (UniqueName: \"kubernetes.io/projected/87c849d7-d613-446d-9f2f-bdcf6da7e4e6-kube-api-access-gb7pd\") pod \"community-operators-qf2zt\" (UID: \"87c849d7-d613-446d-9f2f-bdcf6da7e4e6\") " pod="openshift-marketplace/community-operators-qf2zt" Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.169296 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.169489 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f6f8609b-7f80-4b8f-a371-bb1d10396a15-utilities\") pod \"certified-operators-262k2\" (UID: \"f6f8609b-7f80-4b8f-a371-bb1d10396a15\") " pod="openshift-marketplace/certified-operators-262k2" Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.169516 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/f6f8609b-7f80-4b8f-a371-bb1d10396a15-catalog-content\") pod \"certified-operators-262k2\" (UID: \"f6f8609b-7f80-4b8f-a371-bb1d10396a15\") " pod="openshift-marketplace/certified-operators-262k2" Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.169539 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vbv92\" (UniqueName: \"kubernetes.io/projected/f6f8609b-7f80-4b8f-a371-bb1d10396a15-kube-api-access-vbv92\") pod \"certified-operators-262k2\" (UID: \"f6f8609b-7f80-4b8f-a371-bb1d10396a15\") " pod="openshift-marketplace/certified-operators-262k2" Oct 01 15:05:32 crc kubenswrapper[4869]: E1001 15:05:32.170242 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 15:05:32.670218158 +0000 UTC m=+41.817061274 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.211750 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-zb5c7" event={"ID":"5067e051-5c99-4fe9-b4f2-e5dfa8b1c9b4","Type":"ContainerStarted","Data":"72b3f295f551691d00b8cf0b4eeacf63492635fbb6374a54d6e60f748906fbd4"} Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.211798 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-zb5c7" event={"ID":"5067e051-5c99-4fe9-b4f2-e5dfa8b1c9b4","Type":"ContainerStarted","Data":"2291328fbfb41721eb7cfd1213ab54986b68a5ca9284bdc3ce7f6233509bab90"} Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.216381 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-xpzff" event={"ID":"62ecb2b0-55b3-4344-8237-705ef292ef63","Type":"ContainerStarted","Data":"8d02fb29820db5eb1b4481fc57a694aa4cfe3b55bb68468fbf696de80deb96aa"} Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.224582 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-hjpxg" event={"ID":"de7935aa-128a-418f-bf83-2b9aabc146ed","Type":"ContainerStarted","Data":"061d4f65f060b9b3c65eee15722b06160cbba4e3392286aa9050eddc1ad6146a"} Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.224623 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-hjpxg" event={"ID":"de7935aa-128a-418f-bf83-2b9aabc146ed","Type":"ContainerStarted","Data":"299f6dfbaa4b539d89cb5727a8cf596e309536a6cd9269df10a21bd34d8516f0"} Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.225232 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-hjpxg" Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.227142 4869 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-87546" event={"ID":"4a5e3470-0f08-44e6-8d4c-f968950c75f2","Type":"ContainerStarted","Data":"8ea141dc071e31ac8943520423e4a0e2608c89f21f6ac9832fd4a11e703f5291"} Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.227183 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-87546" event={"ID":"4a5e3470-0f08-44e6-8d4c-f968950c75f2","Type":"ContainerStarted","Data":"35ac252b360e4a26ab7ad6a9f8f51c9d5ad2b0b8f6cc4a2577c3a71450bc25e4"} Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.235337 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-vjs4q" event={"ID":"dcf200aa-ed03-4e96-a638-568a891b276f","Type":"ContainerStarted","Data":"86f7013d21785ec29ee9f004b0bc84581282434048659d38c366ed09617e4108"} Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.235390 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-vjs4q" event={"ID":"dcf200aa-ed03-4e96-a638-568a891b276f","Type":"ContainerStarted","Data":"56d2b6fe2f651194ff2f87c533fe28cf99c09791f1dd2264862db767044302b5"} Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.245157 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-qf2zt" Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.259095 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-jv4xs" event={"ID":"4636576a-d3da-4491-a146-a6ffe6382a06","Type":"ContainerStarted","Data":"0f194fc8bf95f700b6d1017a54eaa910d245f1c7aed3a567d4cb83421a1ac718"} Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.272428 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-xpzff" podStartSLOduration=20.27241237 podStartE2EDuration="20.27241237s" podCreationTimestamp="2025-10-01 15:05:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:05:32.271852155 +0000 UTC m=+41.418695271" watchObservedRunningTime="2025-10-01 15:05:32.27241237 +0000 UTC m=+41.419255486" Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.274667 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f6f8609b-7f80-4b8f-a371-bb1d10396a15-utilities\") pod \"certified-operators-262k2\" (UID: \"f6f8609b-7f80-4b8f-a371-bb1d10396a15\") " pod="openshift-marketplace/certified-operators-262k2" Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.274705 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f6f8609b-7f80-4b8f-a371-bb1d10396a15-catalog-content\") pod \"certified-operators-262k2\" (UID: \"f6f8609b-7f80-4b8f-a371-bb1d10396a15\") " pod="openshift-marketplace/certified-operators-262k2" Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.274730 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4zhjs\" (UID: \"79b5f958-f252-4703-b785-05b0d01a6e72\") " 
pod="openshift-image-registry/image-registry-697d97f7c8-4zhjs" Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.274750 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vbv92\" (UniqueName: \"kubernetes.io/projected/f6f8609b-7f80-4b8f-a371-bb1d10396a15-kube-api-access-vbv92\") pod \"certified-operators-262k2\" (UID: \"f6f8609b-7f80-4b8f-a371-bb1d10396a15\") " pod="openshift-marketplace/certified-operators-262k2" Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.275041 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f6f8609b-7f80-4b8f-a371-bb1d10396a15-utilities\") pod \"certified-operators-262k2\" (UID: \"f6f8609b-7f80-4b8f-a371-bb1d10396a15\") " pod="openshift-marketplace/certified-operators-262k2" Oct 01 15:05:32 crc kubenswrapper[4869]: E1001 15:05:32.275321 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 15:05:32.775310397 +0000 UTC m=+41.922153513 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4zhjs" (UID: "79b5f958-f252-4703-b785-05b0d01a6e72") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.277169 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f6f8609b-7f80-4b8f-a371-bb1d10396a15-catalog-content\") pod \"certified-operators-262k2\" (UID: \"f6f8609b-7f80-4b8f-a371-bb1d10396a15\") " pod="openshift-marketplace/certified-operators-262k2" Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.282327 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-l5vnp" event={"ID":"9a717f28-31d3-4239-a315-97236362d5cb","Type":"ContainerStarted","Data":"53b8ecf7dc61ff2cf710c0f13c56d9e2b358a114b8238214fb2656ac6e4a8393"} Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.334465 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-klj2b" event={"ID":"4ac58a76-114b-4e86-924c-ad1f269b36a8","Type":"ContainerStarted","Data":"0bed54645e69f72c166861d72c18ad42820c06ed14b7358d3eee1ca302c9a222"} Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.347922 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vbv92\" (UniqueName: \"kubernetes.io/projected/f6f8609b-7f80-4b8f-a371-bb1d10396a15-kube-api-access-vbv92\") pod \"certified-operators-262k2\" (UID: \"f6f8609b-7f80-4b8f-a371-bb1d10396a15\") " pod="openshift-marketplace/certified-operators-262k2" Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.358152 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-bb9hc" event={"ID":"3968ac06-9cc3-4dc8-827f-aff3261f131f","Type":"ContainerStarted","Data":"255fcab1323bff09de363b26808667861d10da8e487910748a40236092be71c0"} Oct 01 15:05:32 
crc kubenswrapper[4869]: I1001 15:05:32.358959 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-bb9hc" event={"ID":"3968ac06-9cc3-4dc8-827f-aff3261f131f","Type":"ContainerStarted","Data":"3c7564d2bc89727a34d45726e16c8e7c1057ec041281d9b5df131b1f85f6b7c2"} Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.359084 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-q7dfv"] Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.360398 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-q7dfv" Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.369383 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-q7dfv"] Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.370823 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-jds44" event={"ID":"f4295e29-ac36-4236-b679-4cd87ea76347","Type":"ContainerStarted","Data":"66ca70a8d84fa084ebd68028a8fa3c28cafe2f51bbb7925eee5ab08fd8545d43"} Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.383278 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 15:05:32 crc kubenswrapper[4869]: E1001 15:05:32.385125 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 15:05:32.885104482 +0000 UTC m=+42.031947598 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.403048 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-zb5c7" podStartSLOduration=20.403034131 podStartE2EDuration="20.403034131s" podCreationTimestamp="2025-10-01 15:05:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:05:32.40073878 +0000 UTC m=+41.547581896" watchObservedRunningTime="2025-10-01 15:05:32.403034131 +0000 UTC m=+41.549877247" Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.404779 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-rz7qp" event={"ID":"328ce213-12ef-40af-a41f-e0079949b82d","Type":"ContainerStarted","Data":"0af5d7d2e731c6e4f8893bb89974aacea4efced0174ea205551ecc94007e5407"} Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.446446 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-jlnv5" event={"ID":"fa4dadea-9e0e-431f-b6a7-057e5cec4f9f","Type":"ContainerStarted","Data":"d537822adfe95dd2cecb2d17957e9006ab97133b38d0ea42710d07d2f7c290ed"} Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.460212 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-g22pf"] Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.469940 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-g22pf" Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.515920 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-szl8n\" (UniqueName: \"kubernetes.io/projected/08693370-8f83-4bb1-8e2f-83a2ece2c9dd-kube-api-access-szl8n\") pod \"community-operators-q7dfv\" (UID: \"08693370-8f83-4bb1-8e2f-83a2ece2c9dd\") " pod="openshift-marketplace/community-operators-q7dfv" Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.515996 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4zhjs\" (UID: \"79b5f958-f252-4703-b785-05b0d01a6e72\") " pod="openshift-image-registry/image-registry-697d97f7c8-4zhjs" Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.516057 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/08693370-8f83-4bb1-8e2f-83a2ece2c9dd-utilities\") pod \"community-operators-q7dfv\" (UID: \"08693370-8f83-4bb1-8e2f-83a2ece2c9dd\") " pod="openshift-marketplace/community-operators-q7dfv" Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.518621 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/08693370-8f83-4bb1-8e2f-83a2ece2c9dd-catalog-content\") pod \"community-operators-q7dfv\" (UID: \"08693370-8f83-4bb1-8e2f-83a2ece2c9dd\") " pod="openshift-marketplace/community-operators-q7dfv" Oct 01 15:05:32 crc kubenswrapper[4869]: E1001 15:05:32.519966 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 15:05:33.019931556 +0000 UTC m=+42.166774672 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4zhjs" (UID: "79b5f958-f252-4703-b785-05b0d01a6e72") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.521033 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-g22pf"] Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.542514 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-262k2" Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.543421 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-klj2b" podStartSLOduration=20.530013396 podStartE2EDuration="20.530013396s" podCreationTimestamp="2025-10-01 15:05:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:05:32.511580213 +0000 UTC m=+41.658423339" watchObservedRunningTime="2025-10-01 15:05:32.530013396 +0000 UTC m=+41.676856512" Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.552814 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gdgpp" event={"ID":"906a4a45-c846-4d80-a3ec-10070aae888c","Type":"ContainerStarted","Data":"0b0c6b4e8c666f1daa9152d013b5d6aa60a1b0b9109bae7f8fefd8e7ebfc17fe"} Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.552849 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gdgpp" event={"ID":"906a4a45-c846-4d80-a3ec-10070aae888c","Type":"ContainerStarted","Data":"cb4b63ab94322932f9e3bea26b898c2caae63834c9cff68e8d657b60317b878f"} Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.567459 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-jv4xs" podStartSLOduration=21.567435856 podStartE2EDuration="21.567435856s" podCreationTimestamp="2025-10-01 15:05:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:05:32.548736156 +0000 UTC m=+41.695579272" watchObservedRunningTime="2025-10-01 15:05:32.567435856 +0000 UTC m=+41.714278972" Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.594935 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-vjs4q" podStartSLOduration=21.594918251 podStartE2EDuration="21.594918251s" podCreationTimestamp="2025-10-01 15:05:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:05:32.576752495 +0000 UTC m=+41.723595611" watchObservedRunningTime="2025-10-01 15:05:32.594918251 +0000 UTC m=+41.741761367" Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.595873 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-nk6rv" event={"ID":"60f75bf5-448e-4770-8eba-26c271028f50","Type":"ContainerStarted","Data":"8a6f547c2d856d8b514083205d420b5734d67d373329dde884ec0370aee82707"} Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.597059 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-nk6rv" Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.621950 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wht8b" event={"ID":"328e4a8e-5321-43fa-8b1a-6ff72a374b6d","Type":"ContainerStarted","Data":"a3d3514ae3d31db1fa64108180774d836122cea2298c49ec0d9038365fec587b"} Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.622034 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wht8b" 
event={"ID":"328e4a8e-5321-43fa-8b1a-6ff72a374b6d","Type":"ContainerStarted","Data":"f1fbc3970d3cb855bb9942644ef2cfe65bbf920e86c73b60a2a3880a00ed72db"} Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.622436 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.622954 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wht8b" Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.623379 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9jgjl\" (UniqueName: \"kubernetes.io/projected/61d7fda4-0848-4beb-b7ad-7c361fe37595-kube-api-access-9jgjl\") pod \"certified-operators-g22pf\" (UID: \"61d7fda4-0848-4beb-b7ad-7c361fe37595\") " pod="openshift-marketplace/certified-operators-g22pf" Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.623412 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/08693370-8f83-4bb1-8e2f-83a2ece2c9dd-utilities\") pod \"community-operators-q7dfv\" (UID: \"08693370-8f83-4bb1-8e2f-83a2ece2c9dd\") " pod="openshift-marketplace/community-operators-q7dfv" Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.623434 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/08693370-8f83-4bb1-8e2f-83a2ece2c9dd-catalog-content\") pod \"community-operators-q7dfv\" (UID: \"08693370-8f83-4bb1-8e2f-83a2ece2c9dd\") " pod="openshift-marketplace/community-operators-q7dfv" Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.623474 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/61d7fda4-0848-4beb-b7ad-7c361fe37595-utilities\") pod \"certified-operators-g22pf\" (UID: \"61d7fda4-0848-4beb-b7ad-7c361fe37595\") " pod="openshift-marketplace/certified-operators-g22pf" Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.623509 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/61d7fda4-0848-4beb-b7ad-7c361fe37595-catalog-content\") pod \"certified-operators-g22pf\" (UID: \"61d7fda4-0848-4beb-b7ad-7c361fe37595\") " pod="openshift-marketplace/certified-operators-g22pf" Oct 01 15:05:32 crc kubenswrapper[4869]: E1001 15:05:32.623554 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 15:05:33.123532366 +0000 UTC m=+42.270375472 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.624580 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/08693370-8f83-4bb1-8e2f-83a2ece2c9dd-catalog-content\") pod \"community-operators-q7dfv\" (UID: \"08693370-8f83-4bb1-8e2f-83a2ece2c9dd\") " pod="openshift-marketplace/community-operators-q7dfv" Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.624702 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/08693370-8f83-4bb1-8e2f-83a2ece2c9dd-utilities\") pod \"community-operators-q7dfv\" (UID: \"08693370-8f83-4bb1-8e2f-83a2ece2c9dd\") " pod="openshift-marketplace/community-operators-q7dfv" Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.625843 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-szl8n\" (UniqueName: \"kubernetes.io/projected/08693370-8f83-4bb1-8e2f-83a2ece2c9dd-kube-api-access-szl8n\") pod \"community-operators-q7dfv\" (UID: \"08693370-8f83-4bb1-8e2f-83a2ece2c9dd\") " pod="openshift-marketplace/community-operators-q7dfv" Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.636785 4869 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-wht8b container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.24:5443/healthz\": dial tcp 10.217.0.24:5443: connect: connection refused" start-of-body= Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.636849 4869 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wht8b" podUID="328e4a8e-5321-43fa-8b1a-6ff72a374b6d" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.24:5443/healthz\": dial tcp 10.217.0.24:5443: connect: connection refused" Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.650442 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-t5kll" event={"ID":"2d195bae-5172-4387-869b-086f215963ff","Type":"ContainerStarted","Data":"aa96b28348f96b32cfacf6e83e427c3bff779f8a42f91f2b49d611ddf30525f5"} Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.651409 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-t5kll" Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.655683 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-qd5ft" event={"ID":"9338ac98-e76c-4c74-af46-b646ab637e37","Type":"ContainerStarted","Data":"7c96788c4cb855ba5d60c8e72aa895754b278d3673b8934c484854ba24a3ac0f"} Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.662607 4869 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-t5kll container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.21:8080/healthz\": dial tcp 10.217.0.21:8080: 
connect: connection refused" start-of-body= Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.662663 4869 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-t5kll" podUID="2d195bae-5172-4387-869b-086f215963ff" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.21:8080/healthz\": dial tcp 10.217.0.21:8080: connect: connection refused" Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.666078 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-gqrbk" event={"ID":"2f6f6869-bd98-4c21-a184-a3b7bd5f0b5b","Type":"ContainerStarted","Data":"94f23d6c12f2e858518533550157de59a254e90f5d54d5ed7d1febd7f5dd59a3"} Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.667441 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-gqrbk" Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.668713 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-hnx9p" event={"ID":"42cd6256-ae7b-46c8-a910-0d8723fa9351","Type":"ContainerStarted","Data":"3b0ab118912d2578dcd03ab259c6669f436d0d2a228534ff4c8f4e64fcd4cbb8"} Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.668759 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-hnx9p" event={"ID":"42cd6256-ae7b-46c8-a910-0d8723fa9351","Type":"ContainerStarted","Data":"019082973fa5a7d8791c7317e0a9aaf14b470b9cc21bc624596dacedf5886012"} Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.668773 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-hnx9p" event={"ID":"42cd6256-ae7b-46c8-a910-0d8723fa9351","Type":"ContainerStarted","Data":"277cd50a7977aa0fb63617ad2f9f933e1473d0f0e3f7050b90a7a1d51ee35cbd"} Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.670412 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29322180-8fxq6" event={"ID":"4c1dcd57-cee2-45ab-9a92-8cdd8b864f98","Type":"ContainerStarted","Data":"b9697e7378c7c6e7287e428875324fbb361baef8c62aa101b17c958416a63273"} Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.671547 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-szl8n\" (UniqueName: \"kubernetes.io/projected/08693370-8f83-4bb1-8e2f-83a2ece2c9dd-kube-api-access-szl8n\") pod \"community-operators-q7dfv\" (UID: \"08693370-8f83-4bb1-8e2f-83a2ece2c9dd\") " pod="openshift-marketplace/community-operators-q7dfv" Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.680511 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-l5vnp" podStartSLOduration=20.680495619 podStartE2EDuration="20.680495619s" podCreationTimestamp="2025-10-01 15:05:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:05:32.595821295 +0000 UTC m=+41.742664411" watchObservedRunningTime="2025-10-01 15:05:32.680495619 +0000 UTC m=+41.827338735" Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.695780 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-hjpxg" podStartSLOduration=20.695753587 podStartE2EDuration="20.695753587s" podCreationTimestamp="2025-10-01 15:05:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:05:32.691042231 +0000 UTC m=+41.837885347" watchObservedRunningTime="2025-10-01 15:05:32.695753587 +0000 UTC m=+41.842596703" Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.697417 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-tgtxf" event={"ID":"bd217fac-bac4-44af-8942-0385d47f21d2","Type":"ContainerStarted","Data":"0c670e1300aed4b79dcf26633f396cd75a258ac5c915754ca1b1acdf669d0545"} Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.711167 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-8mczm" Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.711747 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-sm2sz" Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.721191 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-q7dfv" Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.723462 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-bb9hc" podStartSLOduration=20.723442607 podStartE2EDuration="20.723442607s" podCreationTimestamp="2025-10-01 15:05:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:05:32.717073277 +0000 UTC m=+41.863916393" watchObservedRunningTime="2025-10-01 15:05:32.723442607 +0000 UTC m=+41.870285723" Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.728456 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/61d7fda4-0848-4beb-b7ad-7c361fe37595-catalog-content\") pod \"certified-operators-g22pf\" (UID: \"61d7fda4-0848-4beb-b7ad-7c361fe37595\") " pod="openshift-marketplace/certified-operators-g22pf" Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.728551 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4zhjs\" (UID: \"79b5f958-f252-4703-b785-05b0d01a6e72\") " pod="openshift-image-registry/image-registry-697d97f7c8-4zhjs" Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.728596 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9jgjl\" (UniqueName: \"kubernetes.io/projected/61d7fda4-0848-4beb-b7ad-7c361fe37595-kube-api-access-9jgjl\") pod \"certified-operators-g22pf\" (UID: \"61d7fda4-0848-4beb-b7ad-7c361fe37595\") " pod="openshift-marketplace/certified-operators-g22pf" Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.728644 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/61d7fda4-0848-4beb-b7ad-7c361fe37595-utilities\") pod 
\"certified-operators-g22pf\" (UID: \"61d7fda4-0848-4beb-b7ad-7c361fe37595\") " pod="openshift-marketplace/certified-operators-g22pf" Oct 01 15:05:32 crc kubenswrapper[4869]: E1001 15:05:32.729558 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 15:05:33.22954328 +0000 UTC m=+42.376386396 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4zhjs" (UID: "79b5f958-f252-4703-b785-05b0d01a6e72") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.730579 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/61d7fda4-0848-4beb-b7ad-7c361fe37595-utilities\") pod \"certified-operators-g22pf\" (UID: \"61d7fda4-0848-4beb-b7ad-7c361fe37595\") " pod="openshift-marketplace/certified-operators-g22pf" Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.732386 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/61d7fda4-0848-4beb-b7ad-7c361fe37595-catalog-content\") pod \"certified-operators-g22pf\" (UID: \"61d7fda4-0848-4beb-b7ad-7c361fe37595\") " pod="openshift-marketplace/certified-operators-g22pf" Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.771760 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-87546" podStartSLOduration=21.771746378 podStartE2EDuration="21.771746378s" podCreationTimestamp="2025-10-01 15:05:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:05:32.76883573 +0000 UTC m=+41.915678856" watchObservedRunningTime="2025-10-01 15:05:32.771746378 +0000 UTC m=+41.918589494" Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.773285 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9jgjl\" (UniqueName: \"kubernetes.io/projected/61d7fda4-0848-4beb-b7ad-7c361fe37595-kube-api-access-9jgjl\") pod \"certified-operators-g22pf\" (UID: \"61d7fda4-0848-4beb-b7ad-7c361fe37595\") " pod="openshift-marketplace/certified-operators-g22pf" Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.821427 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-g22pf" Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.836947 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wht8b" podStartSLOduration=20.836925211 podStartE2EDuration="20.836925211s" podCreationTimestamp="2025-10-01 15:05:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:05:32.798607426 +0000 UTC m=+41.945450542" watchObservedRunningTime="2025-10-01 15:05:32.836925211 +0000 UTC m=+41.983768327" Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.843208 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 15:05:32 crc kubenswrapper[4869]: E1001 15:05:32.843994 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 15:05:33.343975219 +0000 UTC m=+42.490818325 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.844612 4869 patch_prober.go:28] interesting pod/router-default-5444994796-mdc2f container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 01 15:05:32 crc kubenswrapper[4869]: [-]has-synced failed: reason withheld Oct 01 15:05:32 crc kubenswrapper[4869]: [+]process-running ok Oct 01 15:05:32 crc kubenswrapper[4869]: healthz check failed Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.844656 4869 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-mdc2f" podUID="a4dfc6a1-b623-46cb-b1ff-c6b809e8deaa" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.848960 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4zhjs\" (UID: \"79b5f958-f252-4703-b785-05b0d01a6e72\") " pod="openshift-image-registry/image-registry-697d97f7c8-4zhjs" Oct 01 15:05:32 crc kubenswrapper[4869]: E1001 15:05:32.868747 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2025-10-01 15:05:33.368732231 +0000 UTC m=+42.515575347 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4zhjs" (UID: "79b5f958-f252-4703-b785-05b0d01a6e72") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.928304 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29322180-8fxq6" podStartSLOduration=21.928287493 podStartE2EDuration="21.928287493s" podCreationTimestamp="2025-10-01 15:05:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:05:32.926607288 +0000 UTC m=+42.073450404" watchObservedRunningTime="2025-10-01 15:05:32.928287493 +0000 UTC m=+42.075130619" Oct 01 15:05:32 crc kubenswrapper[4869]: I1001 15:05:32.971927 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 15:05:32 crc kubenswrapper[4869]: E1001 15:05:32.972298 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 15:05:33.472283609 +0000 UTC m=+42.619126725 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:33 crc kubenswrapper[4869]: I1001 15:05:33.016352 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-tgtxf" podStartSLOduration=21.016337557 podStartE2EDuration="21.016337557s" podCreationTimestamp="2025-10-01 15:05:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:05:33.015740461 +0000 UTC m=+42.162583577" watchObservedRunningTime="2025-10-01 15:05:33.016337557 +0000 UTC m=+42.163180673" Oct 01 15:05:33 crc kubenswrapper[4869]: I1001 15:05:33.017825 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-t5kll" podStartSLOduration=21.017819177 podStartE2EDuration="21.017819177s" podCreationTimestamp="2025-10-01 15:05:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:05:32.971324594 +0000 UTC m=+42.118167720" watchObservedRunningTime="2025-10-01 15:05:33.017819177 +0000 UTC m=+42.164662293" Oct 01 15:05:33 crc kubenswrapper[4869]: I1001 15:05:33.045112 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-nk6rv" podStartSLOduration=9.045097536 podStartE2EDuration="9.045097536s" podCreationTimestamp="2025-10-01 15:05:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:05:33.041695065 +0000 UTC m=+42.188538191" watchObservedRunningTime="2025-10-01 15:05:33.045097536 +0000 UTC m=+42.191940652" Oct 01 15:05:33 crc kubenswrapper[4869]: I1001 15:05:33.075192 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4zhjs\" (UID: \"79b5f958-f252-4703-b785-05b0d01a6e72\") " pod="openshift-image-registry/image-registry-697d97f7c8-4zhjs" Oct 01 15:05:33 crc kubenswrapper[4869]: E1001 15:05:33.075532 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 15:05:33.575520309 +0000 UTC m=+42.722363425 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4zhjs" (UID: "79b5f958-f252-4703-b785-05b0d01a6e72") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:33 crc kubenswrapper[4869]: I1001 15:05:33.177103 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 15:05:33 crc kubenswrapper[4869]: E1001 15:05:33.177652 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 15:05:33.677637949 +0000 UTC m=+42.824481065 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:33 crc kubenswrapper[4869]: I1001 15:05:33.216663 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-qd5ft" podStartSLOduration=21.216636972 podStartE2EDuration="21.216636972s" podCreationTimestamp="2025-10-01 15:05:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:05:33.162158435 +0000 UTC m=+42.309001551" watchObservedRunningTime="2025-10-01 15:05:33.216636972 +0000 UTC m=+42.363480088" Oct 01 15:05:33 crc kubenswrapper[4869]: I1001 15:05:33.218324 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-qf2zt"] Oct 01 15:05:33 crc kubenswrapper[4869]: I1001 15:05:33.225761 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-gqrbk" podStartSLOduration=22.225735895 podStartE2EDuration="22.225735895s" podCreationTimestamp="2025-10-01 15:05:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:05:33.204060446 +0000 UTC m=+42.350903562" watchObservedRunningTime="2025-10-01 15:05:33.225735895 +0000 UTC m=+42.372579011" Oct 01 15:05:33 crc kubenswrapper[4869]: I1001 15:05:33.273037 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-hnx9p" podStartSLOduration=21.273021899 podStartE2EDuration="21.273021899s" podCreationTimestamp="2025-10-01 15:05:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:05:33.24650259 +0000 UTC 
m=+42.393345706" watchObservedRunningTime="2025-10-01 15:05:33.273021899 +0000 UTC m=+42.419865005" Oct 01 15:05:33 crc kubenswrapper[4869]: I1001 15:05:33.279894 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4zhjs\" (UID: \"79b5f958-f252-4703-b785-05b0d01a6e72\") " pod="openshift-image-registry/image-registry-697d97f7c8-4zhjs" Oct 01 15:05:33 crc kubenswrapper[4869]: E1001 15:05:33.280157 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 15:05:33.78014487 +0000 UTC m=+42.926987976 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4zhjs" (UID: "79b5f958-f252-4703-b785-05b0d01a6e72") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:33 crc kubenswrapper[4869]: I1001 15:05:33.306861 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-jlnv5" podStartSLOduration=21.306840993 podStartE2EDuration="21.306840993s" podCreationTimestamp="2025-10-01 15:05:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:05:33.304721637 +0000 UTC m=+42.451564763" watchObservedRunningTime="2025-10-01 15:05:33.306840993 +0000 UTC m=+42.453684109" Oct 01 15:05:33 crc kubenswrapper[4869]: I1001 15:05:33.357150 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gdgpp" podStartSLOduration=22.357130838 podStartE2EDuration="22.357130838s" podCreationTimestamp="2025-10-01 15:05:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:05:33.341837739 +0000 UTC m=+42.488680855" watchObservedRunningTime="2025-10-01 15:05:33.357130838 +0000 UTC m=+42.503973954" Oct 01 15:05:33 crc kubenswrapper[4869]: I1001 15:05:33.382769 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 15:05:33 crc kubenswrapper[4869]: E1001 15:05:33.383139 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 15:05:33.883113022 +0000 UTC m=+43.029956138 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:33 crc kubenswrapper[4869]: I1001 15:05:33.475192 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-262k2"] Oct 01 15:05:33 crc kubenswrapper[4869]: I1001 15:05:33.486061 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4zhjs\" (UID: \"79b5f958-f252-4703-b785-05b0d01a6e72\") " pod="openshift-image-registry/image-registry-697d97f7c8-4zhjs" Oct 01 15:05:33 crc kubenswrapper[4869]: E1001 15:05:33.486581 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 15:05:33.986568038 +0000 UTC m=+43.133411154 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4zhjs" (UID: "79b5f958-f252-4703-b785-05b0d01a6e72") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:33 crc kubenswrapper[4869]: I1001 15:05:33.540348 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-q7dfv"] Oct 01 15:05:33 crc kubenswrapper[4869]: W1001 15:05:33.554470 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf6f8609b_7f80_4b8f_a371_bb1d10396a15.slice/crio-fc8de042cac333c18ee3d6e072bd6281ec19b61a76f5ac6e56f77b905b7a2251 WatchSource:0}: Error finding container fc8de042cac333c18ee3d6e072bd6281ec19b61a76f5ac6e56f77b905b7a2251: Status 404 returned error can't find the container with id fc8de042cac333c18ee3d6e072bd6281ec19b61a76f5ac6e56f77b905b7a2251 Oct 01 15:05:33 crc kubenswrapper[4869]: I1001 15:05:33.586758 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 15:05:33 crc kubenswrapper[4869]: E1001 15:05:33.587057 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 15:05:34.087041134 +0000 UTC m=+43.233884250 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:33 crc kubenswrapper[4869]: I1001 15:05:33.644132 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-g22pf"] Oct 01 15:05:33 crc kubenswrapper[4869]: W1001 15:05:33.690596 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod61d7fda4_0848_4beb_b7ad_7c361fe37595.slice/crio-d11b99210822f3cb9bb34f1bc54c4cb7e1fc623f237b268995131f5f02627068 WatchSource:0}: Error finding container d11b99210822f3cb9bb34f1bc54c4cb7e1fc623f237b268995131f5f02627068: Status 404 returned error can't find the container with id d11b99210822f3cb9bb34f1bc54c4cb7e1fc623f237b268995131f5f02627068 Oct 01 15:05:33 crc kubenswrapper[4869]: I1001 15:05:33.691989 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4zhjs\" (UID: \"79b5f958-f252-4703-b785-05b0d01a6e72\") " pod="openshift-image-registry/image-registry-697d97f7c8-4zhjs" Oct 01 15:05:33 crc kubenswrapper[4869]: E1001 15:05:33.692359 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 15:05:34.192345759 +0000 UTC m=+43.339188875 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4zhjs" (UID: "79b5f958-f252-4703-b785-05b0d01a6e72") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:33 crc kubenswrapper[4869]: I1001 15:05:33.775356 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-q7dfv" event={"ID":"08693370-8f83-4bb1-8e2f-83a2ece2c9dd","Type":"ContainerStarted","Data":"efd4f1a81bc09150ba17e6a08e11be307d4cdfb1382a4e746855246f366a804a"} Oct 01 15:05:33 crc kubenswrapper[4869]: I1001 15:05:33.781088 4869 patch_prober.go:28] interesting pod/router-default-5444994796-mdc2f container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 01 15:05:33 crc kubenswrapper[4869]: [-]has-synced failed: reason withheld Oct 01 15:05:33 crc kubenswrapper[4869]: [+]process-running ok Oct 01 15:05:33 crc kubenswrapper[4869]: healthz check failed Oct 01 15:05:33 crc kubenswrapper[4869]: I1001 15:05:33.781138 4869 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-mdc2f" podUID="a4dfc6a1-b623-46cb-b1ff-c6b809e8deaa" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 01 15:05:33 crc kubenswrapper[4869]: I1001 15:05:33.796937 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-9pwb5" event={"ID":"45ac9df8-4d47-44e7-a1bb-bd4d0b8fd560","Type":"ContainerStarted","Data":"6cf08095a8cfc678f274f362a2ed1d09727ff008bbe6e38c71b52f3034e03648"} Oct 01 15:05:33 crc kubenswrapper[4869]: I1001 15:05:33.801770 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 15:05:33 crc kubenswrapper[4869]: E1001 15:05:33.802130 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 15:05:34.302114884 +0000 UTC m=+43.448958000 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:33 crc kubenswrapper[4869]: I1001 15:05:33.820288 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-tgtxf" event={"ID":"bd217fac-bac4-44af-8942-0385d47f21d2","Type":"ContainerStarted","Data":"a239d6e9092facf6e1a5fbdb73bad1f15c8e33e61fe4924d3e47ed01ad21652f"} Oct 01 15:05:33 crc kubenswrapper[4869]: I1001 15:05:33.826421 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-rz7qp" event={"ID":"328ce213-12ef-40af-a41f-e0079949b82d","Type":"ContainerStarted","Data":"df78419b9699cbd4394f09f73450f69a253a918eb2dc43ad2b69c4418b643a05"} Oct 01 15:05:33 crc kubenswrapper[4869]: I1001 15:05:33.826457 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-rz7qp" event={"ID":"328ce213-12ef-40af-a41f-e0079949b82d","Type":"ContainerStarted","Data":"b5c7e9440800bc033e3aed9cb9cd9857a86be3339e90a320c277f9337d574281"} Oct 01 15:05:33 crc kubenswrapper[4869]: I1001 15:05:33.828241 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-nk6rv" event={"ID":"60f75bf5-448e-4770-8eba-26c271028f50","Type":"ContainerStarted","Data":"df0f9194d649673f07e7536014401d5c0c6800ddff274ade9c118e03ff5a8184"} Oct 01 15:05:33 crc kubenswrapper[4869]: I1001 15:05:33.830720 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-g22pf" event={"ID":"61d7fda4-0848-4beb-b7ad-7c361fe37595","Type":"ContainerStarted","Data":"d11b99210822f3cb9bb34f1bc54c4cb7e1fc623f237b268995131f5f02627068"} Oct 01 15:05:33 crc kubenswrapper[4869]: I1001 15:05:33.831648 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-262k2" event={"ID":"f6f8609b-7f80-4b8f-a371-bb1d10396a15","Type":"ContainerStarted","Data":"fc8de042cac333c18ee3d6e072bd6281ec19b61a76f5ac6e56f77b905b7a2251"} Oct 01 15:05:33 crc kubenswrapper[4869]: I1001 15:05:33.833025 4869 generic.go:334] "Generic (PLEG): container finished" podID="87c849d7-d613-446d-9f2f-bdcf6da7e4e6" containerID="2464ac7ae10423f08930a1b33f60af97c97e44ce0a6b6c6be1cc9b7ff7c6dbc4" exitCode=0 Oct 01 15:05:33 crc kubenswrapper[4869]: I1001 15:05:33.833856 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qf2zt" event={"ID":"87c849d7-d613-446d-9f2f-bdcf6da7e4e6","Type":"ContainerDied","Data":"2464ac7ae10423f08930a1b33f60af97c97e44ce0a6b6c6be1cc9b7ff7c6dbc4"} Oct 01 15:05:33 crc kubenswrapper[4869]: I1001 15:05:33.833879 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qf2zt" event={"ID":"87c849d7-d613-446d-9f2f-bdcf6da7e4e6","Type":"ContainerStarted","Data":"443568e766c749eb77d704ea5f0cd76ba1cda514ae736ea92dbc8c8a8f57c687"} Oct 01 15:05:33 crc kubenswrapper[4869]: I1001 15:05:33.837520 4869 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 01 15:05:33 crc kubenswrapper[4869]: I1001 15:05:33.837970 4869 patch_prober.go:28] interesting 
pod/marketplace-operator-79b997595-t5kll container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.21:8080/healthz\": dial tcp 10.217.0.21:8080: connect: connection refused" start-of-body= Oct 01 15:05:33 crc kubenswrapper[4869]: I1001 15:05:33.838000 4869 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-t5kll" podUID="2d195bae-5172-4387-869b-086f215963ff" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.21:8080/healthz\": dial tcp 10.217.0.21:8080: connect: connection refused" Oct 01 15:05:33 crc kubenswrapper[4869]: I1001 15:05:33.841632 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-multus/cni-sysctl-allowlist-ds-xffhm" podUID="9ff567e5-6f6b-4b25-a8a0-3aa792f4291d" containerName="kube-multus-additional-cni-plugins" containerID="cri-o://44e089bc1369f6c1a4bcd915248cba1eac03bcf6058f0b48a934fd3e6a73f37a" gracePeriod=30 Oct 01 15:05:33 crc kubenswrapper[4869]: I1001 15:05:33.847428 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wht8b" Oct 01 15:05:33 crc kubenswrapper[4869]: I1001 15:05:33.881840 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-rz7qp" podStartSLOduration=22.881823725 podStartE2EDuration="22.881823725s" podCreationTimestamp="2025-10-01 15:05:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:05:33.879915664 +0000 UTC m=+43.026758790" watchObservedRunningTime="2025-10-01 15:05:33.881823725 +0000 UTC m=+43.028666841" Oct 01 15:05:33 crc kubenswrapper[4869]: I1001 15:05:33.896507 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-bcf4v"] Oct 01 15:05:33 crc kubenswrapper[4869]: I1001 15:05:33.897676 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-bcf4v" Oct 01 15:05:33 crc kubenswrapper[4869]: I1001 15:05:33.905095 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Oct 01 15:05:33 crc kubenswrapper[4869]: I1001 15:05:33.906203 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-bcf4v"] Oct 01 15:05:33 crc kubenswrapper[4869]: I1001 15:05:33.906427 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4zhjs\" (UID: \"79b5f958-f252-4703-b785-05b0d01a6e72\") " pod="openshift-image-registry/image-registry-697d97f7c8-4zhjs" Oct 01 15:05:33 crc kubenswrapper[4869]: E1001 15:05:33.912528 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 15:05:34.412512755 +0000 UTC m=+43.559355871 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4zhjs" (UID: "79b5f958-f252-4703-b785-05b0d01a6e72") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:34 crc kubenswrapper[4869]: I1001 15:05:34.013075 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 15:05:34 crc kubenswrapper[4869]: E1001 15:05:34.013358 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 15:05:34.513331591 +0000 UTC m=+43.660174707 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:34 crc kubenswrapper[4869]: I1001 15:05:34.013611 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f877n\" (UniqueName: \"kubernetes.io/projected/1b0f8e72-9451-4c28-8e39-9e8c94096b80-kube-api-access-f877n\") pod \"redhat-marketplace-bcf4v\" (UID: \"1b0f8e72-9451-4c28-8e39-9e8c94096b80\") " pod="openshift-marketplace/redhat-marketplace-bcf4v" Oct 01 15:05:34 crc kubenswrapper[4869]: I1001 15:05:34.013642 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1b0f8e72-9451-4c28-8e39-9e8c94096b80-catalog-content\") pod \"redhat-marketplace-bcf4v\" (UID: \"1b0f8e72-9451-4c28-8e39-9e8c94096b80\") " pod="openshift-marketplace/redhat-marketplace-bcf4v" Oct 01 15:05:34 crc kubenswrapper[4869]: I1001 15:05:34.013667 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1b0f8e72-9451-4c28-8e39-9e8c94096b80-utilities\") pod \"redhat-marketplace-bcf4v\" (UID: \"1b0f8e72-9451-4c28-8e39-9e8c94096b80\") " pod="openshift-marketplace/redhat-marketplace-bcf4v" Oct 01 15:05:34 crc kubenswrapper[4869]: E1001 15:05:34.014102 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 15:05:34.514089781 +0000 UTC m=+43.660932897 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4zhjs" (UID: "79b5f958-f252-4703-b785-05b0d01a6e72") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:34 crc kubenswrapper[4869]: I1001 15:05:34.014274 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4zhjs\" (UID: \"79b5f958-f252-4703-b785-05b0d01a6e72\") " pod="openshift-image-registry/image-registry-697d97f7c8-4zhjs" Oct 01 15:05:34 crc kubenswrapper[4869]: I1001 15:05:34.115396 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 15:05:34 crc kubenswrapper[4869]: E1001 15:05:34.115580 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 15:05:34.615550703 +0000 UTC m=+43.762393819 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:34 crc kubenswrapper[4869]: I1001 15:05:34.115664 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f877n\" (UniqueName: \"kubernetes.io/projected/1b0f8e72-9451-4c28-8e39-9e8c94096b80-kube-api-access-f877n\") pod \"redhat-marketplace-bcf4v\" (UID: \"1b0f8e72-9451-4c28-8e39-9e8c94096b80\") " pod="openshift-marketplace/redhat-marketplace-bcf4v" Oct 01 15:05:34 crc kubenswrapper[4869]: I1001 15:05:34.115705 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1b0f8e72-9451-4c28-8e39-9e8c94096b80-catalog-content\") pod \"redhat-marketplace-bcf4v\" (UID: \"1b0f8e72-9451-4c28-8e39-9e8c94096b80\") " pod="openshift-marketplace/redhat-marketplace-bcf4v" Oct 01 15:05:34 crc kubenswrapper[4869]: I1001 15:05:34.115735 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1b0f8e72-9451-4c28-8e39-9e8c94096b80-utilities\") pod \"redhat-marketplace-bcf4v\" (UID: \"1b0f8e72-9451-4c28-8e39-9e8c94096b80\") " pod="openshift-marketplace/redhat-marketplace-bcf4v" Oct 01 15:05:34 crc kubenswrapper[4869]: I1001 15:05:34.115796 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4zhjs\" (UID: \"79b5f958-f252-4703-b785-05b0d01a6e72\") " pod="openshift-image-registry/image-registry-697d97f7c8-4zhjs" Oct 01 15:05:34 crc kubenswrapper[4869]: E1001 15:05:34.116078 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 15:05:34.616064867 +0000 UTC m=+43.762907983 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4zhjs" (UID: "79b5f958-f252-4703-b785-05b0d01a6e72") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:34 crc kubenswrapper[4869]: I1001 15:05:34.116111 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1b0f8e72-9451-4c28-8e39-9e8c94096b80-catalog-content\") pod \"redhat-marketplace-bcf4v\" (UID: \"1b0f8e72-9451-4c28-8e39-9e8c94096b80\") " pod="openshift-marketplace/redhat-marketplace-bcf4v" Oct 01 15:05:34 crc kubenswrapper[4869]: I1001 15:05:34.116355 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1b0f8e72-9451-4c28-8e39-9e8c94096b80-utilities\") pod \"redhat-marketplace-bcf4v\" (UID: \"1b0f8e72-9451-4c28-8e39-9e8c94096b80\") " pod="openshift-marketplace/redhat-marketplace-bcf4v" Oct 01 15:05:34 crc kubenswrapper[4869]: I1001 15:05:34.143108 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f877n\" (UniqueName: \"kubernetes.io/projected/1b0f8e72-9451-4c28-8e39-9e8c94096b80-kube-api-access-f877n\") pod \"redhat-marketplace-bcf4v\" (UID: \"1b0f8e72-9451-4c28-8e39-9e8c94096b80\") " pod="openshift-marketplace/redhat-marketplace-bcf4v" Oct 01 15:05:34 crc kubenswrapper[4869]: I1001 15:05:34.217143 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 15:05:34 crc kubenswrapper[4869]: E1001 15:05:34.217486 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 15:05:34.717467648 +0000 UTC m=+43.864310764 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:34 crc kubenswrapper[4869]: I1001 15:05:34.258024 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-qxb5h"] Oct 01 15:05:34 crc kubenswrapper[4869]: I1001 15:05:34.259397 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qxb5h" Oct 01 15:05:34 crc kubenswrapper[4869]: I1001 15:05:34.272973 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-qxb5h"] Oct 01 15:05:34 crc kubenswrapper[4869]: I1001 15:05:34.319159 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9956d\" (UniqueName: \"kubernetes.io/projected/a0c02ede-8eef-4d4e-a277-bcda77228c29-kube-api-access-9956d\") pod \"redhat-marketplace-qxb5h\" (UID: \"a0c02ede-8eef-4d4e-a277-bcda77228c29\") " pod="openshift-marketplace/redhat-marketplace-qxb5h" Oct 01 15:05:34 crc kubenswrapper[4869]: I1001 15:05:34.319202 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a0c02ede-8eef-4d4e-a277-bcda77228c29-catalog-content\") pod \"redhat-marketplace-qxb5h\" (UID: \"a0c02ede-8eef-4d4e-a277-bcda77228c29\") " pod="openshift-marketplace/redhat-marketplace-qxb5h" Oct 01 15:05:34 crc kubenswrapper[4869]: I1001 15:05:34.319240 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a0c02ede-8eef-4d4e-a277-bcda77228c29-utilities\") pod \"redhat-marketplace-qxb5h\" (UID: \"a0c02ede-8eef-4d4e-a277-bcda77228c29\") " pod="openshift-marketplace/redhat-marketplace-qxb5h" Oct 01 15:05:34 crc kubenswrapper[4869]: I1001 15:05:34.319314 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4zhjs\" (UID: \"79b5f958-f252-4703-b785-05b0d01a6e72\") " pod="openshift-image-registry/image-registry-697d97f7c8-4zhjs" Oct 01 15:05:34 crc kubenswrapper[4869]: E1001 15:05:34.319603 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 15:05:34.819590708 +0000 UTC m=+43.966433824 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4zhjs" (UID: "79b5f958-f252-4703-b785-05b0d01a6e72") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:34 crc kubenswrapper[4869]: I1001 15:05:34.319887 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-bcf4v" Oct 01 15:05:34 crc kubenswrapper[4869]: I1001 15:05:34.420461 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 15:05:34 crc kubenswrapper[4869]: E1001 15:05:34.420633 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 15:05:34.920607909 +0000 UTC m=+44.067451025 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:34 crc kubenswrapper[4869]: I1001 15:05:34.420743 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a0c02ede-8eef-4d4e-a277-bcda77228c29-utilities\") pod \"redhat-marketplace-qxb5h\" (UID: \"a0c02ede-8eef-4d4e-a277-bcda77228c29\") " pod="openshift-marketplace/redhat-marketplace-qxb5h" Oct 01 15:05:34 crc kubenswrapper[4869]: I1001 15:05:34.420849 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4zhjs\" (UID: \"79b5f958-f252-4703-b785-05b0d01a6e72\") " pod="openshift-image-registry/image-registry-697d97f7c8-4zhjs" Oct 01 15:05:34 crc kubenswrapper[4869]: I1001 15:05:34.420901 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9956d\" (UniqueName: \"kubernetes.io/projected/a0c02ede-8eef-4d4e-a277-bcda77228c29-kube-api-access-9956d\") pod \"redhat-marketplace-qxb5h\" (UID: \"a0c02ede-8eef-4d4e-a277-bcda77228c29\") " pod="openshift-marketplace/redhat-marketplace-qxb5h" Oct 01 15:05:34 crc kubenswrapper[4869]: I1001 15:05:34.420940 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a0c02ede-8eef-4d4e-a277-bcda77228c29-catalog-content\") pod \"redhat-marketplace-qxb5h\" (UID: \"a0c02ede-8eef-4d4e-a277-bcda77228c29\") " pod="openshift-marketplace/redhat-marketplace-qxb5h" Oct 01 15:05:34 crc 
kubenswrapper[4869]: I1001 15:05:34.421214 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a0c02ede-8eef-4d4e-a277-bcda77228c29-utilities\") pod \"redhat-marketplace-qxb5h\" (UID: \"a0c02ede-8eef-4d4e-a277-bcda77228c29\") " pod="openshift-marketplace/redhat-marketplace-qxb5h" Oct 01 15:05:34 crc kubenswrapper[4869]: E1001 15:05:34.421455 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 15:05:34.921447351 +0000 UTC m=+44.068290467 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4zhjs" (UID: "79b5f958-f252-4703-b785-05b0d01a6e72") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:34 crc kubenswrapper[4869]: I1001 15:05:34.423612 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a0c02ede-8eef-4d4e-a277-bcda77228c29-catalog-content\") pod \"redhat-marketplace-qxb5h\" (UID: \"a0c02ede-8eef-4d4e-a277-bcda77228c29\") " pod="openshift-marketplace/redhat-marketplace-qxb5h" Oct 01 15:05:34 crc kubenswrapper[4869]: I1001 15:05:34.441115 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9956d\" (UniqueName: \"kubernetes.io/projected/a0c02ede-8eef-4d4e-a277-bcda77228c29-kube-api-access-9956d\") pod \"redhat-marketplace-qxb5h\" (UID: \"a0c02ede-8eef-4d4e-a277-bcda77228c29\") " pod="openshift-marketplace/redhat-marketplace-qxb5h" Oct 01 15:05:34 crc kubenswrapper[4869]: I1001 15:05:34.522441 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 15:05:34 crc kubenswrapper[4869]: E1001 15:05:34.522661 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 15:05:35.022643497 +0000 UTC m=+44.169486603 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:34 crc kubenswrapper[4869]: I1001 15:05:34.523010 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4zhjs\" (UID: \"79b5f958-f252-4703-b785-05b0d01a6e72\") " pod="openshift-image-registry/image-registry-697d97f7c8-4zhjs" Oct 01 15:05:34 crc kubenswrapper[4869]: E1001 15:05:34.523388 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 15:05:35.023379226 +0000 UTC m=+44.170222342 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4zhjs" (UID: "79b5f958-f252-4703-b785-05b0d01a6e72") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:34 crc kubenswrapper[4869]: I1001 15:05:34.579613 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qxb5h" Oct 01 15:05:34 crc kubenswrapper[4869]: I1001 15:05:34.587811 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-bcf4v"] Oct 01 15:05:34 crc kubenswrapper[4869]: W1001 15:05:34.618322 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1b0f8e72_9451_4c28_8e39_9e8c94096b80.slice/crio-6a00783b2c2395eb51be85a38eaf093df51f3a6cf804a95bce08fbdae35d553c WatchSource:0}: Error finding container 6a00783b2c2395eb51be85a38eaf093df51f3a6cf804a95bce08fbdae35d553c: Status 404 returned error can't find the container with id 6a00783b2c2395eb51be85a38eaf093df51f3a6cf804a95bce08fbdae35d553c Oct 01 15:05:34 crc kubenswrapper[4869]: I1001 15:05:34.623522 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 15:05:34 crc kubenswrapper[4869]: E1001 15:05:34.623699 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 15:05:35.123651977 +0000 UTC m=+44.270495093 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:34 crc kubenswrapper[4869]: I1001 15:05:34.623762 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4zhjs\" (UID: \"79b5f958-f252-4703-b785-05b0d01a6e72\") " pod="openshift-image-registry/image-registry-697d97f7c8-4zhjs" Oct 01 15:05:34 crc kubenswrapper[4869]: E1001 15:05:34.624005 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 15:05:35.123993816 +0000 UTC m=+44.270836932 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4zhjs" (UID: "79b5f958-f252-4703-b785-05b0d01a6e72") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:34 crc kubenswrapper[4869]: I1001 15:05:34.726060 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 15:05:34 crc kubenswrapper[4869]: E1001 15:05:34.726528 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 15:05:35.226510977 +0000 UTC m=+44.373354083 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:34 crc kubenswrapper[4869]: I1001 15:05:34.750380 4869 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Oct 01 15:05:34 crc kubenswrapper[4869]: I1001 15:05:34.779520 4869 patch_prober.go:28] interesting pod/router-default-5444994796-mdc2f container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 01 15:05:34 crc kubenswrapper[4869]: [-]has-synced failed: reason withheld Oct 01 15:05:34 crc kubenswrapper[4869]: [+]process-running ok Oct 01 15:05:34 crc kubenswrapper[4869]: healthz check failed Oct 01 15:05:34 crc kubenswrapper[4869]: I1001 15:05:34.779778 4869 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-mdc2f" podUID="a4dfc6a1-b623-46cb-b1ff-c6b809e8deaa" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 01 15:05:34 crc kubenswrapper[4869]: I1001 15:05:34.814383 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-qxb5h"] Oct 01 15:05:34 crc kubenswrapper[4869]: I1001 15:05:34.829189 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4zhjs\" (UID: \"79b5f958-f252-4703-b785-05b0d01a6e72\") " pod="openshift-image-registry/image-registry-697d97f7c8-4zhjs" Oct 01 15:05:34 crc kubenswrapper[4869]: E1001 15:05:34.829517 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 15:05:35.32950561 +0000 UTC m=+44.476348726 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4zhjs" (UID: "79b5f958-f252-4703-b785-05b0d01a6e72") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:34 crc kubenswrapper[4869]: I1001 15:05:34.856923 4869 generic.go:334] "Generic (PLEG): container finished" podID="f6f8609b-7f80-4b8f-a371-bb1d10396a15" containerID="13586bd6881cf500aeb36a4b8acab801edf57b584a7848ff9784afcba0abad7f" exitCode=0 Oct 01 15:05:34 crc kubenswrapper[4869]: I1001 15:05:34.857337 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-262k2" event={"ID":"f6f8609b-7f80-4b8f-a371-bb1d10396a15","Type":"ContainerDied","Data":"13586bd6881cf500aeb36a4b8acab801edf57b584a7848ff9784afcba0abad7f"} Oct 01 15:05:34 crc kubenswrapper[4869]: I1001 15:05:34.864217 4869 generic.go:334] "Generic (PLEG): container finished" podID="1b0f8e72-9451-4c28-8e39-9e8c94096b80" containerID="27b6aac3f6cbc1b4d61941e52a5acbe636a346e2e1b6089319caf8c4c0f7963b" exitCode=0 Oct 01 15:05:34 crc kubenswrapper[4869]: I1001 15:05:34.864463 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bcf4v" event={"ID":"1b0f8e72-9451-4c28-8e39-9e8c94096b80","Type":"ContainerDied","Data":"27b6aac3f6cbc1b4d61941e52a5acbe636a346e2e1b6089319caf8c4c0f7963b"} Oct 01 15:05:34 crc kubenswrapper[4869]: I1001 15:05:34.864507 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bcf4v" event={"ID":"1b0f8e72-9451-4c28-8e39-9e8c94096b80","Type":"ContainerStarted","Data":"6a00783b2c2395eb51be85a38eaf093df51f3a6cf804a95bce08fbdae35d553c"} Oct 01 15:05:34 crc kubenswrapper[4869]: I1001 15:05:34.867743 4869 generic.go:334] "Generic (PLEG): container finished" podID="08693370-8f83-4bb1-8e2f-83a2ece2c9dd" containerID="d620af3444aac50fc7f4066d147e6e91e320f66965795f87556eaa667a1a0bb6" exitCode=0 Oct 01 15:05:34 crc kubenswrapper[4869]: I1001 15:05:34.867808 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-q7dfv" event={"ID":"08693370-8f83-4bb1-8e2f-83a2ece2c9dd","Type":"ContainerDied","Data":"d620af3444aac50fc7f4066d147e6e91e320f66965795f87556eaa667a1a0bb6"} Oct 01 15:05:34 crc kubenswrapper[4869]: I1001 15:05:34.871659 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-9pwb5" event={"ID":"45ac9df8-4d47-44e7-a1bb-bd4d0b8fd560","Type":"ContainerStarted","Data":"bd44077350a3ce467a9bd0c959bc9ec48d1964eab8fd1e9376c090881aba0391"} Oct 01 15:05:34 crc kubenswrapper[4869]: I1001 15:05:34.871691 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-9pwb5" event={"ID":"45ac9df8-4d47-44e7-a1bb-bd4d0b8fd560","Type":"ContainerStarted","Data":"a8572675c58744e85de0cb92a998f47c2687e0a7f74585a9ffbe5e17c7dd958a"} Oct 01 15:05:34 crc kubenswrapper[4869]: I1001 15:05:34.876917 4869 generic.go:334] "Generic (PLEG): container finished" podID="61d7fda4-0848-4beb-b7ad-7c361fe37595" containerID="af0abb9116574ec1f0efff2d221182fa86aaf2f53e7d2328da5a64f71cff4c5c" exitCode=0 Oct 01 15:05:34 crc kubenswrapper[4869]: I1001 15:05:34.877034 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/certified-operators-g22pf" event={"ID":"61d7fda4-0848-4beb-b7ad-7c361fe37595","Type":"ContainerDied","Data":"af0abb9116574ec1f0efff2d221182fa86aaf2f53e7d2328da5a64f71cff4c5c"} Oct 01 15:05:34 crc kubenswrapper[4869]: I1001 15:05:34.882563 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-t5kll" Oct 01 15:05:34 crc kubenswrapper[4869]: I1001 15:05:34.930401 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 15:05:34 crc kubenswrapper[4869]: E1001 15:05:34.930537 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 15:05:35.430515591 +0000 UTC m=+44.577358707 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:34 crc kubenswrapper[4869]: I1001 15:05:34.932478 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4zhjs\" (UID: \"79b5f958-f252-4703-b785-05b0d01a6e72\") " pod="openshift-image-registry/image-registry-697d97f7c8-4zhjs" Oct 01 15:05:34 crc kubenswrapper[4869]: E1001 15:05:34.933318 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 15:05:35.433290475 +0000 UTC m=+44.580133671 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4zhjs" (UID: "79b5f958-f252-4703-b785-05b0d01a6e72") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:35 crc kubenswrapper[4869]: I1001 15:05:35.036223 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 15:05:35 crc kubenswrapper[4869]: E1001 15:05:35.036427 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 15:05:35.536408601 +0000 UTC m=+44.683251717 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:35 crc kubenswrapper[4869]: I1001 15:05:35.036996 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4zhjs\" (UID: \"79b5f958-f252-4703-b785-05b0d01a6e72\") " pod="openshift-image-registry/image-registry-697d97f7c8-4zhjs" Oct 01 15:05:35 crc kubenswrapper[4869]: E1001 15:05:35.040550 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 15:05:35.540490271 +0000 UTC m=+44.687333507 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4zhjs" (UID: "79b5f958-f252-4703-b785-05b0d01a6e72") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:35 crc kubenswrapper[4869]: I1001 15:05:35.138178 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 15:05:35 crc kubenswrapper[4869]: E1001 15:05:35.138341 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 15:05:35.638316996 +0000 UTC m=+44.785160112 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:35 crc kubenswrapper[4869]: I1001 15:05:35.138449 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4zhjs\" (UID: \"79b5f958-f252-4703-b785-05b0d01a6e72\") " pod="openshift-image-registry/image-registry-697d97f7c8-4zhjs" Oct 01 15:05:35 crc kubenswrapper[4869]: E1001 15:05:35.138752 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 15:05:35.638744907 +0000 UTC m=+44.785588023 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4zhjs" (UID: "79b5f958-f252-4703-b785-05b0d01a6e72") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:35 crc kubenswrapper[4869]: I1001 15:05:35.250905 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 15:05:35 crc kubenswrapper[4869]: E1001 15:05:35.251385 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 15:05:35.751351708 +0000 UTC m=+44.898194824 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:35 crc kubenswrapper[4869]: I1001 15:05:35.251519 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4zhjs\" (UID: \"79b5f958-f252-4703-b785-05b0d01a6e72\") " pod="openshift-image-registry/image-registry-697d97f7c8-4zhjs" Oct 01 15:05:35 crc kubenswrapper[4869]: E1001 15:05:35.251961 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 15:05:35.751943754 +0000 UTC m=+44.898786870 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4zhjs" (UID: "79b5f958-f252-4703-b785-05b0d01a6e72") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:35 crc kubenswrapper[4869]: I1001 15:05:35.262181 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-xvckx"] Oct 01 15:05:35 crc kubenswrapper[4869]: I1001 15:05:35.264795 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-xvckx" Oct 01 15:05:35 crc kubenswrapper[4869]: I1001 15:05:35.265688 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-xvckx"] Oct 01 15:05:35 crc kubenswrapper[4869]: I1001 15:05:35.266892 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Oct 01 15:05:35 crc kubenswrapper[4869]: I1001 15:05:35.352507 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 15:05:35 crc kubenswrapper[4869]: E1001 15:05:35.352647 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 15:05:35.852617415 +0000 UTC m=+44.999460521 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:35 crc kubenswrapper[4869]: I1001 15:05:35.352762 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3d37704b-7106-4dcc-b91a-1de81e03d6a9-utilities\") pod \"redhat-operators-xvckx\" (UID: \"3d37704b-7106-4dcc-b91a-1de81e03d6a9\") " pod="openshift-marketplace/redhat-operators-xvckx" Oct 01 15:05:35 crc kubenswrapper[4869]: I1001 15:05:35.352829 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h2sfg\" (UniqueName: \"kubernetes.io/projected/3d37704b-7106-4dcc-b91a-1de81e03d6a9-kube-api-access-h2sfg\") pod \"redhat-operators-xvckx\" (UID: \"3d37704b-7106-4dcc-b91a-1de81e03d6a9\") " pod="openshift-marketplace/redhat-operators-xvckx" Oct 01 15:05:35 crc kubenswrapper[4869]: I1001 15:05:35.353021 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3d37704b-7106-4dcc-b91a-1de81e03d6a9-catalog-content\") pod \"redhat-operators-xvckx\" (UID: \"3d37704b-7106-4dcc-b91a-1de81e03d6a9\") " pod="openshift-marketplace/redhat-operators-xvckx" Oct 01 15:05:35 crc kubenswrapper[4869]: I1001 15:05:35.353116 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4zhjs\" (UID: \"79b5f958-f252-4703-b785-05b0d01a6e72\") " pod="openshift-image-registry/image-registry-697d97f7c8-4zhjs" Oct 01 15:05:35 crc kubenswrapper[4869]: E1001 15:05:35.355059 4869 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 15:05:35.855000259 +0000 UTC m=+45.001843375 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4zhjs" (UID: "79b5f958-f252-4703-b785-05b0d01a6e72") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:35 crc kubenswrapper[4869]: I1001 15:05:35.454356 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 15:05:35 crc kubenswrapper[4869]: E1001 15:05:35.454533 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 15:05:35.954503399 +0000 UTC m=+45.101346515 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:35 crc kubenswrapper[4869]: I1001 15:05:35.454579 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3d37704b-7106-4dcc-b91a-1de81e03d6a9-utilities\") pod \"redhat-operators-xvckx\" (UID: \"3d37704b-7106-4dcc-b91a-1de81e03d6a9\") " pod="openshift-marketplace/redhat-operators-xvckx" Oct 01 15:05:35 crc kubenswrapper[4869]: I1001 15:05:35.454680 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h2sfg\" (UniqueName: \"kubernetes.io/projected/3d37704b-7106-4dcc-b91a-1de81e03d6a9-kube-api-access-h2sfg\") pod \"redhat-operators-xvckx\" (UID: \"3d37704b-7106-4dcc-b91a-1de81e03d6a9\") " pod="openshift-marketplace/redhat-operators-xvckx" Oct 01 15:05:35 crc kubenswrapper[4869]: I1001 15:05:35.454733 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3d37704b-7106-4dcc-b91a-1de81e03d6a9-catalog-content\") pod \"redhat-operators-xvckx\" (UID: \"3d37704b-7106-4dcc-b91a-1de81e03d6a9\") " pod="openshift-marketplace/redhat-operators-xvckx" Oct 01 15:05:35 crc kubenswrapper[4869]: I1001 15:05:35.454768 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4zhjs\" (UID: \"79b5f958-f252-4703-b785-05b0d01a6e72\") " 
pod="openshift-image-registry/image-registry-697d97f7c8-4zhjs" Oct 01 15:05:35 crc kubenswrapper[4869]: I1001 15:05:35.455127 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3d37704b-7106-4dcc-b91a-1de81e03d6a9-utilities\") pod \"redhat-operators-xvckx\" (UID: \"3d37704b-7106-4dcc-b91a-1de81e03d6a9\") " pod="openshift-marketplace/redhat-operators-xvckx" Oct 01 15:05:35 crc kubenswrapper[4869]: I1001 15:05:35.455211 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3d37704b-7106-4dcc-b91a-1de81e03d6a9-catalog-content\") pod \"redhat-operators-xvckx\" (UID: \"3d37704b-7106-4dcc-b91a-1de81e03d6a9\") " pod="openshift-marketplace/redhat-operators-xvckx" Oct 01 15:05:35 crc kubenswrapper[4869]: E1001 15:05:35.455235 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 15:05:35.955219608 +0000 UTC m=+45.102062724 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4zhjs" (UID: "79b5f958-f252-4703-b785-05b0d01a6e72") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:35 crc kubenswrapper[4869]: I1001 15:05:35.479215 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h2sfg\" (UniqueName: \"kubernetes.io/projected/3d37704b-7106-4dcc-b91a-1de81e03d6a9-kube-api-access-h2sfg\") pod \"redhat-operators-xvckx\" (UID: \"3d37704b-7106-4dcc-b91a-1de81e03d6a9\") " pod="openshift-marketplace/redhat-operators-xvckx" Oct 01 15:05:35 crc kubenswrapper[4869]: I1001 15:05:35.555577 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 15:05:35 crc kubenswrapper[4869]: E1001 15:05:35.555813 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 15:05:36.055787016 +0000 UTC m=+45.202630132 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:35 crc kubenswrapper[4869]: I1001 15:05:35.555908 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4zhjs\" (UID: \"79b5f958-f252-4703-b785-05b0d01a6e72\") " pod="openshift-image-registry/image-registry-697d97f7c8-4zhjs" Oct 01 15:05:35 crc kubenswrapper[4869]: E1001 15:05:35.556202 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 15:05:36.056194287 +0000 UTC m=+45.203037403 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4zhjs" (UID: "79b5f958-f252-4703-b785-05b0d01a6e72") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:35 crc kubenswrapper[4869]: I1001 15:05:35.592175 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-hxvsn" Oct 01 15:05:35 crc kubenswrapper[4869]: I1001 15:05:35.596856 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-hxvsn" Oct 01 15:05:35 crc kubenswrapper[4869]: I1001 15:05:35.601989 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-xvckx" Oct 01 15:05:35 crc kubenswrapper[4869]: I1001 15:05:35.656196 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-mv2bb"] Oct 01 15:05:35 crc kubenswrapper[4869]: I1001 15:05:35.657126 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 15:05:35 crc kubenswrapper[4869]: E1001 15:05:35.657778 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 15:05:36.157747642 +0000 UTC m=+45.304590758 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:35 crc kubenswrapper[4869]: I1001 15:05:35.657908 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-mv2bb" Oct 01 15:05:35 crc kubenswrapper[4869]: I1001 15:05:35.659739 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4zhjs\" (UID: \"79b5f958-f252-4703-b785-05b0d01a6e72\") " pod="openshift-image-registry/image-registry-697d97f7c8-4zhjs" Oct 01 15:05:35 crc kubenswrapper[4869]: E1001 15:05:35.661642 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 15:05:36.161629236 +0000 UTC m=+45.308472352 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4zhjs" (UID: "79b5f958-f252-4703-b785-05b0d01a6e72") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 15:05:35 crc kubenswrapper[4869]: I1001 15:05:35.668430 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-mv2bb"] Oct 01 15:05:35 crc kubenswrapper[4869]: I1001 15:05:35.695621 4869 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2025-10-01T15:05:34.750408826Z","Handler":null,"Name":""} Oct 01 15:05:35 crc kubenswrapper[4869]: I1001 15:05:35.721948 4869 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Oct 01 15:05:35 crc kubenswrapper[4869]: I1001 15:05:35.721988 4869 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Oct 01 15:05:35 crc kubenswrapper[4869]: I1001 15:05:35.737683 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Oct 01 15:05:35 crc kubenswrapper[4869]: I1001 15:05:35.738828 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 01 15:05:35 crc kubenswrapper[4869]: I1001 15:05:35.743940 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n" Oct 01 15:05:35 crc kubenswrapper[4869]: I1001 15:05:35.744300 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt" Oct 01 15:05:35 crc kubenswrapper[4869]: I1001 15:05:35.753248 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Oct 01 15:05:35 crc kubenswrapper[4869]: I1001 15:05:35.760795 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 15:05:35 crc kubenswrapper[4869]: I1001 15:05:35.761102 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5b7pk\" (UniqueName: \"kubernetes.io/projected/c1e7add4-dd2c-47c1-afee-e8dc937be3a8-kube-api-access-5b7pk\") pod \"redhat-operators-mv2bb\" (UID: \"c1e7add4-dd2c-47c1-afee-e8dc937be3a8\") " pod="openshift-marketplace/redhat-operators-mv2bb" Oct 01 15:05:35 crc kubenswrapper[4869]: I1001 15:05:35.761186 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c1e7add4-dd2c-47c1-afee-e8dc937be3a8-catalog-content\") pod \"redhat-operators-mv2bb\" (UID: \"c1e7add4-dd2c-47c1-afee-e8dc937be3a8\") " pod="openshift-marketplace/redhat-operators-mv2bb" Oct 01 15:05:35 crc kubenswrapper[4869]: I1001 15:05:35.761226 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c1e7add4-dd2c-47c1-afee-e8dc937be3a8-utilities\") pod \"redhat-operators-mv2bb\" (UID: \"c1e7add4-dd2c-47c1-afee-e8dc937be3a8\") " pod="openshift-marketplace/redhat-operators-mv2bb" Oct 01 15:05:35 crc kubenswrapper[4869]: I1001 15:05:35.797894 4869 patch_prober.go:28] interesting pod/router-default-5444994796-mdc2f container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 01 15:05:35 crc kubenswrapper[4869]: [-]has-synced failed: reason withheld Oct 01 15:05:35 crc kubenswrapper[4869]: [+]process-running ok Oct 01 15:05:35 crc kubenswrapper[4869]: healthz check failed Oct 01 15:05:35 crc kubenswrapper[4869]: I1001 15:05:35.798252 4869 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-mdc2f" podUID="a4dfc6a1-b623-46cb-b1ff-c6b809e8deaa" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 01 15:05:35 crc kubenswrapper[4869]: I1001 15:05:35.830163 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). 
InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Oct 01 15:05:35 crc kubenswrapper[4869]: I1001 15:05:35.864937 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c4c7f452-4e1f-4445-90ef-ed07112ae2f1-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"c4c7f452-4e1f-4445-90ef-ed07112ae2f1\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 01 15:05:35 crc kubenswrapper[4869]: I1001 15:05:35.864991 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4zhjs\" (UID: \"79b5f958-f252-4703-b785-05b0d01a6e72\") " pod="openshift-image-registry/image-registry-697d97f7c8-4zhjs" Oct 01 15:05:35 crc kubenswrapper[4869]: I1001 15:05:35.865014 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/c4c7f452-4e1f-4445-90ef-ed07112ae2f1-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"c4c7f452-4e1f-4445-90ef-ed07112ae2f1\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 01 15:05:35 crc kubenswrapper[4869]: I1001 15:05:35.865042 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c1e7add4-dd2c-47c1-afee-e8dc937be3a8-catalog-content\") pod \"redhat-operators-mv2bb\" (UID: \"c1e7add4-dd2c-47c1-afee-e8dc937be3a8\") " pod="openshift-marketplace/redhat-operators-mv2bb" Oct 01 15:05:35 crc kubenswrapper[4869]: I1001 15:05:35.865077 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c1e7add4-dd2c-47c1-afee-e8dc937be3a8-utilities\") pod \"redhat-operators-mv2bb\" (UID: \"c1e7add4-dd2c-47c1-afee-e8dc937be3a8\") " pod="openshift-marketplace/redhat-operators-mv2bb" Oct 01 15:05:35 crc kubenswrapper[4869]: I1001 15:05:35.865118 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5b7pk\" (UniqueName: \"kubernetes.io/projected/c1e7add4-dd2c-47c1-afee-e8dc937be3a8-kube-api-access-5b7pk\") pod \"redhat-operators-mv2bb\" (UID: \"c1e7add4-dd2c-47c1-afee-e8dc937be3a8\") " pod="openshift-marketplace/redhat-operators-mv2bb" Oct 01 15:05:35 crc kubenswrapper[4869]: I1001 15:05:35.866053 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c1e7add4-dd2c-47c1-afee-e8dc937be3a8-catalog-content\") pod \"redhat-operators-mv2bb\" (UID: \"c1e7add4-dd2c-47c1-afee-e8dc937be3a8\") " pod="openshift-marketplace/redhat-operators-mv2bb" Oct 01 15:05:35 crc kubenswrapper[4869]: I1001 15:05:35.866295 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c1e7add4-dd2c-47c1-afee-e8dc937be3a8-utilities\") pod \"redhat-operators-mv2bb\" (UID: \"c1e7add4-dd2c-47c1-afee-e8dc937be3a8\") " pod="openshift-marketplace/redhat-operators-mv2bb" Oct 01 15:05:35 crc kubenswrapper[4869]: I1001 15:05:35.883762 4869 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Oct 01 15:05:35 crc kubenswrapper[4869]: I1001 15:05:35.883794 4869 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4zhjs\" (UID: \"79b5f958-f252-4703-b785-05b0d01a6e72\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-4zhjs" Oct 01 15:05:35 crc kubenswrapper[4869]: I1001 15:05:35.890616 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5b7pk\" (UniqueName: \"kubernetes.io/projected/c1e7add4-dd2c-47c1-afee-e8dc937be3a8-kube-api-access-5b7pk\") pod \"redhat-operators-mv2bb\" (UID: \"c1e7add4-dd2c-47c1-afee-e8dc937be3a8\") " pod="openshift-marketplace/redhat-operators-mv2bb" Oct 01 15:05:35 crc kubenswrapper[4869]: I1001 15:05:35.913982 4869 generic.go:334] "Generic (PLEG): container finished" podID="a0c02ede-8eef-4d4e-a277-bcda77228c29" containerID="462206422cc7124651e76efa953eaca34c538e69cd80c94425a6061bc7b0be0e" exitCode=0 Oct 01 15:05:35 crc kubenswrapper[4869]: I1001 15:05:35.914037 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qxb5h" event={"ID":"a0c02ede-8eef-4d4e-a277-bcda77228c29","Type":"ContainerDied","Data":"462206422cc7124651e76efa953eaca34c538e69cd80c94425a6061bc7b0be0e"} Oct 01 15:05:35 crc kubenswrapper[4869]: I1001 15:05:35.914063 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qxb5h" event={"ID":"a0c02ede-8eef-4d4e-a277-bcda77228c29","Type":"ContainerStarted","Data":"80a58603e24d81cbd49e54b75293f38de157c0af3079f1003d293de01a2765b3"} Oct 01 15:05:35 crc kubenswrapper[4869]: I1001 15:05:35.923019 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-9pwb5" event={"ID":"45ac9df8-4d47-44e7-a1bb-bd4d0b8fd560","Type":"ContainerStarted","Data":"a439a6e334d077bf78a545689f3aced74f4a83ba7f7d3e725bcfafd6365dbb6c"} Oct 01 15:05:35 crc kubenswrapper[4869]: I1001 15:05:35.947851 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4zhjs\" (UID: \"79b5f958-f252-4703-b785-05b0d01a6e72\") " pod="openshift-image-registry/image-registry-697d97f7c8-4zhjs" Oct 01 15:05:35 crc kubenswrapper[4869]: I1001 15:05:35.961127 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-9pwb5" podStartSLOduration=11.961086440999999 podStartE2EDuration="11.961086441s" podCreationTimestamp="2025-10-01 15:05:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:05:35.96069016 +0000 UTC m=+45.107533296" watchObservedRunningTime="2025-10-01 15:05:35.961086441 +0000 UTC m=+45.107929557" Oct 01 15:05:35 crc kubenswrapper[4869]: I1001 15:05:35.966485 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c4c7f452-4e1f-4445-90ef-ed07112ae2f1-kube-api-access\") pod \"revision-pruner-9-crc\" 
(UID: \"c4c7f452-4e1f-4445-90ef-ed07112ae2f1\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 01 15:05:35 crc kubenswrapper[4869]: I1001 15:05:35.966641 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/c4c7f452-4e1f-4445-90ef-ed07112ae2f1-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"c4c7f452-4e1f-4445-90ef-ed07112ae2f1\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 01 15:05:35 crc kubenswrapper[4869]: I1001 15:05:35.974093 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/c4c7f452-4e1f-4445-90ef-ed07112ae2f1-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"c4c7f452-4e1f-4445-90ef-ed07112ae2f1\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 01 15:05:35 crc kubenswrapper[4869]: I1001 15:05:35.996372 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-4zhjs" Oct 01 15:05:36 crc kubenswrapper[4869]: I1001 15:05:36.006427 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c4c7f452-4e1f-4445-90ef-ed07112ae2f1-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"c4c7f452-4e1f-4445-90ef-ed07112ae2f1\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 01 15:05:36 crc kubenswrapper[4869]: I1001 15:05:36.015484 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-mv2bb" Oct 01 15:05:36 crc kubenswrapper[4869]: I1001 15:05:36.027491 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-xvckx"] Oct 01 15:05:36 crc kubenswrapper[4869]: W1001 15:05:36.076836 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3d37704b_7106_4dcc_b91a_1de81e03d6a9.slice/crio-8d4c57fe0e9ca89d859c76e89b0c9f4644a7c8ee8fe224ad4603a2904e641750 WatchSource:0}: Error finding container 8d4c57fe0e9ca89d859c76e89b0c9f4644a7c8ee8fe224ad4603a2904e641750: Status 404 returned error can't find the container with id 8d4c57fe0e9ca89d859c76e89b0c9f4644a7c8ee8fe224ad4603a2904e641750 Oct 01 15:05:36 crc kubenswrapper[4869]: I1001 15:05:36.080772 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 01 15:05:36 crc kubenswrapper[4869]: I1001 15:05:36.538886 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-4zhjs"] Oct 01 15:05:36 crc kubenswrapper[4869]: I1001 15:05:36.614749 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-gqrbk" Oct 01 15:05:36 crc kubenswrapper[4869]: I1001 15:05:36.648646 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Oct 01 15:05:36 crc kubenswrapper[4869]: I1001 15:05:36.704982 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-mv2bb"] Oct 01 15:05:36 crc kubenswrapper[4869]: W1001 15:05:36.720660 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc1e7add4_dd2c_47c1_afee_e8dc937be3a8.slice/crio-1ab2f152cff2240ed7e93c1fd9cea3d3f76cf6148686c4d2139f9cdf5ed05f16 WatchSource:0}: Error finding container 1ab2f152cff2240ed7e93c1fd9cea3d3f76cf6148686c4d2139f9cdf5ed05f16: Status 404 returned error can't find the container with id 1ab2f152cff2240ed7e93c1fd9cea3d3f76cf6148686c4d2139f9cdf5ed05f16 Oct 01 15:05:36 crc kubenswrapper[4869]: I1001 15:05:36.787368 4869 patch_prober.go:28] interesting pod/router-default-5444994796-mdc2f container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 01 15:05:36 crc kubenswrapper[4869]: [-]has-synced failed: reason withheld Oct 01 15:05:36 crc kubenswrapper[4869]: [+]process-running ok Oct 01 15:05:36 crc kubenswrapper[4869]: healthz check failed Oct 01 15:05:36 crc kubenswrapper[4869]: I1001 15:05:36.787802 4869 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-mdc2f" podUID="a4dfc6a1-b623-46cb-b1ff-c6b809e8deaa" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 01 15:05:36 crc kubenswrapper[4869]: I1001 15:05:36.952332 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"c4c7f452-4e1f-4445-90ef-ed07112ae2f1","Type":"ContainerStarted","Data":"167abe7834bedc3ece374fd2f8cdb090b795f6b003a0500431a617992de29f26"} Oct 01 15:05:36 crc kubenswrapper[4869]: I1001 15:05:36.962614 4869 generic.go:334] "Generic (PLEG): container finished" podID="3d37704b-7106-4dcc-b91a-1de81e03d6a9" containerID="eff29f31cf7005108ec7269a92445ed7b765afa376cce5debe729f7aac109324" exitCode=0 Oct 01 15:05:36 crc kubenswrapper[4869]: I1001 15:05:36.963507 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xvckx" event={"ID":"3d37704b-7106-4dcc-b91a-1de81e03d6a9","Type":"ContainerDied","Data":"eff29f31cf7005108ec7269a92445ed7b765afa376cce5debe729f7aac109324"} Oct 01 15:05:36 crc kubenswrapper[4869]: I1001 15:05:36.963662 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xvckx" event={"ID":"3d37704b-7106-4dcc-b91a-1de81e03d6a9","Type":"ContainerStarted","Data":"8d4c57fe0e9ca89d859c76e89b0c9f4644a7c8ee8fe224ad4603a2904e641750"} Oct 01 15:05:36 crc kubenswrapper[4869]: I1001 15:05:36.976205 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-image-registry/image-registry-697d97f7c8-4zhjs" event={"ID":"79b5f958-f252-4703-b785-05b0d01a6e72","Type":"ContainerStarted","Data":"fdafdb3adfe9df9cf41c835d1421b9d5508d4e91abdac9bc1f398d9683e02490"} Oct 01 15:05:36 crc kubenswrapper[4869]: I1001 15:05:36.980695 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mv2bb" event={"ID":"c1e7add4-dd2c-47c1-afee-e8dc937be3a8","Type":"ContainerStarted","Data":"1ab2f152cff2240ed7e93c1fd9cea3d3f76cf6148686c4d2139f9cdf5ed05f16"} Oct 01 15:05:36 crc kubenswrapper[4869]: I1001 15:05:36.982464 4869 patch_prober.go:28] interesting pod/downloads-7954f5f757-dxcl2 container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" start-of-body= Oct 01 15:05:36 crc kubenswrapper[4869]: I1001 15:05:36.982523 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-dxcl2" podUID="2d8f8fae-d727-4763-bb02-0a74320ba8c4" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" Oct 01 15:05:36 crc kubenswrapper[4869]: I1001 15:05:36.982548 4869 patch_prober.go:28] interesting pod/downloads-7954f5f757-dxcl2 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" start-of-body= Oct 01 15:05:36 crc kubenswrapper[4869]: I1001 15:05:36.982599 4869 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-dxcl2" podUID="2d8f8fae-d727-4763-bb02-0a74320ba8c4" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" Oct 01 15:05:37 crc kubenswrapper[4869]: I1001 15:05:37.635518 4869 patch_prober.go:28] interesting pod/console-f9d7485db-jv4xs container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.16:8443/health\": dial tcp 10.217.0.16:8443: connect: connection refused" start-of-body= Oct 01 15:05:37 crc kubenswrapper[4869]: I1001 15:05:37.635899 4869 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-jv4xs" podUID="4636576a-d3da-4491-a146-a6ffe6382a06" containerName="console" probeResult="failure" output="Get \"https://10.217.0.16:8443/health\": dial tcp 10.217.0.16:8443: connect: connection refused" Oct 01 15:05:37 crc kubenswrapper[4869]: I1001 15:05:37.659144 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Oct 01 15:05:37 crc kubenswrapper[4869]: I1001 15:05:37.660124 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-jv4xs" Oct 01 15:05:37 crc kubenswrapper[4869]: I1001 15:05:37.660169 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-jv4xs" Oct 01 15:05:37 crc kubenswrapper[4869]: I1001 15:05:37.776836 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-mdc2f" Oct 01 15:05:37 crc kubenswrapper[4869]: I1001 15:05:37.784436 4869 patch_prober.go:28] interesting 
pod/router-default-5444994796-mdc2f container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 01 15:05:37 crc kubenswrapper[4869]: [-]has-synced failed: reason withheld Oct 01 15:05:37 crc kubenswrapper[4869]: [+]process-running ok Oct 01 15:05:37 crc kubenswrapper[4869]: healthz check failed Oct 01 15:05:37 crc kubenswrapper[4869]: I1001 15:05:37.784499 4869 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-mdc2f" podUID="a4dfc6a1-b623-46cb-b1ff-c6b809e8deaa" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 01 15:05:37 crc kubenswrapper[4869]: E1001 15:05:37.920966 4869 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="44e089bc1369f6c1a4bcd915248cba1eac03bcf6058f0b48a934fd3e6a73f37a" cmd=["/bin/bash","-c","test -f /ready/ready"] Oct 01 15:05:37 crc kubenswrapper[4869]: E1001 15:05:37.928668 4869 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="44e089bc1369f6c1a4bcd915248cba1eac03bcf6058f0b48a934fd3e6a73f37a" cmd=["/bin/bash","-c","test -f /ready/ready"] Oct 01 15:05:37 crc kubenswrapper[4869]: E1001 15:05:37.930492 4869 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="44e089bc1369f6c1a4bcd915248cba1eac03bcf6058f0b48a934fd3e6a73f37a" cmd=["/bin/bash","-c","test -f /ready/ready"] Oct 01 15:05:37 crc kubenswrapper[4869]: E1001 15:05:37.930523 4869 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openshift-multus/cni-sysctl-allowlist-ds-xffhm" podUID="9ff567e5-6f6b-4b25-a8a0-3aa792f4291d" containerName="kube-multus-additional-cni-plugins" Oct 01 15:05:38 crc kubenswrapper[4869]: I1001 15:05:38.027389 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-4zhjs" event={"ID":"79b5f958-f252-4703-b785-05b0d01a6e72","Type":"ContainerStarted","Data":"fcc73b0228a2a59b4c6372461cc50cb0cb9d077f35f62f827a155eab5b60ed3b"} Oct 01 15:05:38 crc kubenswrapper[4869]: I1001 15:05:38.028161 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-4zhjs" Oct 01 15:05:38 crc kubenswrapper[4869]: I1001 15:05:38.039881 4869 generic.go:334] "Generic (PLEG): container finished" podID="c1e7add4-dd2c-47c1-afee-e8dc937be3a8" containerID="547c7b7716e0a4dd9a9790d183ff1e84e1989a2c0e1aa029ac1339a78be818d2" exitCode=0 Oct 01 15:05:38 crc kubenswrapper[4869]: I1001 15:05:38.040052 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mv2bb" event={"ID":"c1e7add4-dd2c-47c1-afee-e8dc937be3a8","Type":"ContainerDied","Data":"547c7b7716e0a4dd9a9790d183ff1e84e1989a2c0e1aa029ac1339a78be818d2"} Oct 01 15:05:38 crc kubenswrapper[4869]: I1001 15:05:38.047921 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"c4c7f452-4e1f-4445-90ef-ed07112ae2f1","Type":"ContainerStarted","Data":"19dc283554482ba2adcd8b1d56e61fbd04b91307454d0504bbebd466288372b1"} Oct 01 15:05:38 crc kubenswrapper[4869]: I1001 15:05:38.058451 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-4zhjs" podStartSLOduration=27.058428301 podStartE2EDuration="27.058428301s" podCreationTimestamp="2025-10-01 15:05:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:05:38.054780503 +0000 UTC m=+47.201623619" watchObservedRunningTime="2025-10-01 15:05:38.058428301 +0000 UTC m=+47.205271417" Oct 01 15:05:38 crc kubenswrapper[4869]: I1001 15:05:38.095161 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/revision-pruner-9-crc" podStartSLOduration=3.095140732 podStartE2EDuration="3.095140732s" podCreationTimestamp="2025-10-01 15:05:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:05:38.094809183 +0000 UTC m=+47.241652299" watchObservedRunningTime="2025-10-01 15:05:38.095140732 +0000 UTC m=+47.241983858" Oct 01 15:05:38 crc kubenswrapper[4869]: I1001 15:05:38.784932 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-mdc2f" Oct 01 15:05:38 crc kubenswrapper[4869]: I1001 15:05:38.787782 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-mdc2f" Oct 01 15:05:39 crc kubenswrapper[4869]: I1001 15:05:39.068250 4869 generic.go:334] "Generic (PLEG): container finished" podID="c4c7f452-4e1f-4445-90ef-ed07112ae2f1" containerID="19dc283554482ba2adcd8b1d56e61fbd04b91307454d0504bbebd466288372b1" exitCode=0 Oct 01 15:05:39 crc kubenswrapper[4869]: I1001 15:05:39.069100 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"c4c7f452-4e1f-4445-90ef-ed07112ae2f1","Type":"ContainerDied","Data":"19dc283554482ba2adcd8b1d56e61fbd04b91307454d0504bbebd466288372b1"} Oct 01 15:05:40 crc kubenswrapper[4869]: I1001 15:05:40.134953 4869 generic.go:334] "Generic (PLEG): container finished" podID="4c1dcd57-cee2-45ab-9a92-8cdd8b864f98" containerID="b9697e7378c7c6e7287e428875324fbb361baef8c62aa101b17c958416a63273" exitCode=0 Oct 01 15:05:40 crc kubenswrapper[4869]: I1001 15:05:40.135011 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29322180-8fxq6" event={"ID":"4c1dcd57-cee2-45ab-9a92-8cdd8b864f98","Type":"ContainerDied","Data":"b9697e7378c7c6e7287e428875324fbb361baef8c62aa101b17c958416a63273"} Oct 01 15:05:40 crc kubenswrapper[4869]: I1001 15:05:40.478969 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 01 15:05:40 crc kubenswrapper[4869]: I1001 15:05:40.590499 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/c4c7f452-4e1f-4445-90ef-ed07112ae2f1-kubelet-dir\") pod \"c4c7f452-4e1f-4445-90ef-ed07112ae2f1\" (UID: \"c4c7f452-4e1f-4445-90ef-ed07112ae2f1\") " Oct 01 15:05:40 crc kubenswrapper[4869]: I1001 15:05:40.590602 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c4c7f452-4e1f-4445-90ef-ed07112ae2f1-kube-api-access\") pod \"c4c7f452-4e1f-4445-90ef-ed07112ae2f1\" (UID: \"c4c7f452-4e1f-4445-90ef-ed07112ae2f1\") " Oct 01 15:05:40 crc kubenswrapper[4869]: I1001 15:05:40.590614 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c4c7f452-4e1f-4445-90ef-ed07112ae2f1-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "c4c7f452-4e1f-4445-90ef-ed07112ae2f1" (UID: "c4c7f452-4e1f-4445-90ef-ed07112ae2f1"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 15:05:40 crc kubenswrapper[4869]: I1001 15:05:40.592020 4869 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/c4c7f452-4e1f-4445-90ef-ed07112ae2f1-kubelet-dir\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:40 crc kubenswrapper[4869]: I1001 15:05:40.614557 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c4c7f452-4e1f-4445-90ef-ed07112ae2f1-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "c4c7f452-4e1f-4445-90ef-ed07112ae2f1" (UID: "c4c7f452-4e1f-4445-90ef-ed07112ae2f1"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:05:40 crc kubenswrapper[4869]: I1001 15:05:40.694209 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c4c7f452-4e1f-4445-90ef-ed07112ae2f1-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:41 crc kubenswrapper[4869]: I1001 15:05:41.146282 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 01 15:05:41 crc kubenswrapper[4869]: I1001 15:05:41.146397 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"c4c7f452-4e1f-4445-90ef-ed07112ae2f1","Type":"ContainerDied","Data":"167abe7834bedc3ece374fd2f8cdb090b795f6b003a0500431a617992de29f26"} Oct 01 15:05:41 crc kubenswrapper[4869]: I1001 15:05:41.146432 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="167abe7834bedc3ece374fd2f8cdb090b795f6b003a0500431a617992de29f26" Oct 01 15:05:41 crc kubenswrapper[4869]: I1001 15:05:41.798165 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Oct 01 15:05:41 crc kubenswrapper[4869]: E1001 15:05:41.800156 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c4c7f452-4e1f-4445-90ef-ed07112ae2f1" containerName="pruner" Oct 01 15:05:41 crc kubenswrapper[4869]: I1001 15:05:41.800174 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="c4c7f452-4e1f-4445-90ef-ed07112ae2f1" containerName="pruner" Oct 01 15:05:41 crc kubenswrapper[4869]: I1001 15:05:41.800290 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="c4c7f452-4e1f-4445-90ef-ed07112ae2f1" containerName="pruner" Oct 01 15:05:41 crc kubenswrapper[4869]: I1001 15:05:41.800642 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 01 15:05:41 crc kubenswrapper[4869]: I1001 15:05:41.806340 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Oct 01 15:05:41 crc kubenswrapper[4869]: I1001 15:05:41.806863 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Oct 01 15:05:41 crc kubenswrapper[4869]: I1001 15:05:41.815521 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Oct 01 15:05:41 crc kubenswrapper[4869]: I1001 15:05:41.916105 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/03706444-4f26-489d-a0af-457d9bbaec6c-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"03706444-4f26-489d-a0af-457d9bbaec6c\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 01 15:05:41 crc kubenswrapper[4869]: I1001 15:05:41.916195 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/03706444-4f26-489d-a0af-457d9bbaec6c-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"03706444-4f26-489d-a0af-457d9bbaec6c\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 01 15:05:42 crc kubenswrapper[4869]: I1001 15:05:42.017638 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/03706444-4f26-489d-a0af-457d9bbaec6c-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"03706444-4f26-489d-a0af-457d9bbaec6c\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 01 15:05:42 crc kubenswrapper[4869]: I1001 15:05:42.017714 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: 
\"kubernetes.io/projected/03706444-4f26-489d-a0af-457d9bbaec6c-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"03706444-4f26-489d-a0af-457d9bbaec6c\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 01 15:05:42 crc kubenswrapper[4869]: I1001 15:05:42.017883 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/03706444-4f26-489d-a0af-457d9bbaec6c-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"03706444-4f26-489d-a0af-457d9bbaec6c\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 01 15:05:42 crc kubenswrapper[4869]: I1001 15:05:42.073487 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/03706444-4f26-489d-a0af-457d9bbaec6c-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"03706444-4f26-489d-a0af-457d9bbaec6c\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 01 15:05:42 crc kubenswrapper[4869]: I1001 15:05:42.115416 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 01 15:05:42 crc kubenswrapper[4869]: I1001 15:05:42.886388 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-nk6rv" Oct 01 15:05:43 crc kubenswrapper[4869]: I1001 15:05:43.601629 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-27gqg" Oct 01 15:05:46 crc kubenswrapper[4869]: I1001 15:05:46.987412 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-dxcl2" Oct 01 15:05:47 crc kubenswrapper[4869]: I1001 15:05:47.064966 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 01 15:05:47 crc kubenswrapper[4869]: I1001 15:05:47.090328 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Oct 01 15:05:47 crc kubenswrapper[4869]: I1001 15:05:47.635146 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-jv4xs" Oct 01 15:05:47 crc kubenswrapper[4869]: I1001 15:05:47.638711 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-jv4xs" Oct 01 15:05:47 crc kubenswrapper[4869]: I1001 15:05:47.652849 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=0.652827454 podStartE2EDuration="652.827454ms" podCreationTimestamp="2025-10-01 15:05:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:05:47.646365071 +0000 UTC m=+56.793208197" watchObservedRunningTime="2025-10-01 15:05:47.652827454 +0000 UTC m=+56.799670570" Oct 01 15:05:47 crc kubenswrapper[4869]: E1001 15:05:47.898748 4869 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="44e089bc1369f6c1a4bcd915248cba1eac03bcf6058f0b48a934fd3e6a73f37a" cmd=["/bin/bash","-c","test -f /ready/ready"] Oct 01 15:05:47 crc kubenswrapper[4869]: E1001 15:05:47.900441 4869 log.go:32] "ExecSync cmd from runtime service failed" 
err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="44e089bc1369f6c1a4bcd915248cba1eac03bcf6058f0b48a934fd3e6a73f37a" cmd=["/bin/bash","-c","test -f /ready/ready"] Oct 01 15:05:47 crc kubenswrapper[4869]: E1001 15:05:47.902049 4869 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="44e089bc1369f6c1a4bcd915248cba1eac03bcf6058f0b48a934fd3e6a73f37a" cmd=["/bin/bash","-c","test -f /ready/ready"] Oct 01 15:05:47 crc kubenswrapper[4869]: E1001 15:05:47.902086 4869 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openshift-multus/cni-sysctl-allowlist-ds-xffhm" podUID="9ff567e5-6f6b-4b25-a8a0-3aa792f4291d" containerName="kube-multus-additional-cni-plugins" Oct 01 15:05:52 crc kubenswrapper[4869]: I1001 15:05:52.833075 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29322180-8fxq6" Oct 01 15:05:53 crc kubenswrapper[4869]: I1001 15:05:53.007978 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4c1dcd57-cee2-45ab-9a92-8cdd8b864f98-secret-volume\") pod \"4c1dcd57-cee2-45ab-9a92-8cdd8b864f98\" (UID: \"4c1dcd57-cee2-45ab-9a92-8cdd8b864f98\") " Oct 01 15:05:53 crc kubenswrapper[4869]: I1001 15:05:53.008234 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vpnfr\" (UniqueName: \"kubernetes.io/projected/4c1dcd57-cee2-45ab-9a92-8cdd8b864f98-kube-api-access-vpnfr\") pod \"4c1dcd57-cee2-45ab-9a92-8cdd8b864f98\" (UID: \"4c1dcd57-cee2-45ab-9a92-8cdd8b864f98\") " Oct 01 15:05:53 crc kubenswrapper[4869]: I1001 15:05:53.008319 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4c1dcd57-cee2-45ab-9a92-8cdd8b864f98-config-volume\") pod \"4c1dcd57-cee2-45ab-9a92-8cdd8b864f98\" (UID: \"4c1dcd57-cee2-45ab-9a92-8cdd8b864f98\") " Oct 01 15:05:53 crc kubenswrapper[4869]: I1001 15:05:53.009195 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4c1dcd57-cee2-45ab-9a92-8cdd8b864f98-config-volume" (OuterVolumeSpecName: "config-volume") pod "4c1dcd57-cee2-45ab-9a92-8cdd8b864f98" (UID: "4c1dcd57-cee2-45ab-9a92-8cdd8b864f98"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:05:53 crc kubenswrapper[4869]: I1001 15:05:53.016203 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4c1dcd57-cee2-45ab-9a92-8cdd8b864f98-kube-api-access-vpnfr" (OuterVolumeSpecName: "kube-api-access-vpnfr") pod "4c1dcd57-cee2-45ab-9a92-8cdd8b864f98" (UID: "4c1dcd57-cee2-45ab-9a92-8cdd8b864f98"). InnerVolumeSpecName "kube-api-access-vpnfr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:05:53 crc kubenswrapper[4869]: I1001 15:05:53.021346 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4c1dcd57-cee2-45ab-9a92-8cdd8b864f98-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "4c1dcd57-cee2-45ab-9a92-8cdd8b864f98" (UID: "4c1dcd57-cee2-45ab-9a92-8cdd8b864f98"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:05:53 crc kubenswrapper[4869]: I1001 15:05:53.109729 4869 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4c1dcd57-cee2-45ab-9a92-8cdd8b864f98-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:53 crc kubenswrapper[4869]: I1001 15:05:53.109765 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vpnfr\" (UniqueName: \"kubernetes.io/projected/4c1dcd57-cee2-45ab-9a92-8cdd8b864f98-kube-api-access-vpnfr\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:53 crc kubenswrapper[4869]: I1001 15:05:53.109775 4869 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4c1dcd57-cee2-45ab-9a92-8cdd8b864f98-config-volume\") on node \"crc\" DevicePath \"\"" Oct 01 15:05:53 crc kubenswrapper[4869]: I1001 15:05:53.263584 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29322180-8fxq6" event={"ID":"4c1dcd57-cee2-45ab-9a92-8cdd8b864f98","Type":"ContainerDied","Data":"163bb8225372db29e4028d11979bd2f8a8acf2df2fe78c9ab2504e3a5f3cd6bc"} Oct 01 15:05:53 crc kubenswrapper[4869]: I1001 15:05:53.263642 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="163bb8225372db29e4028d11979bd2f8a8acf2df2fe78c9ab2504e3a5f3cd6bc" Oct 01 15:05:53 crc kubenswrapper[4869]: I1001 15:05:53.263699 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29322180-8fxq6" Oct 01 15:05:56 crc kubenswrapper[4869]: I1001 15:05:56.003310 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-4zhjs" Oct 01 15:05:57 crc kubenswrapper[4869]: E1001 15:05:57.894756 4869 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="44e089bc1369f6c1a4bcd915248cba1eac03bcf6058f0b48a934fd3e6a73f37a" cmd=["/bin/bash","-c","test -f /ready/ready"] Oct 01 15:05:57 crc kubenswrapper[4869]: E1001 15:05:57.896647 4869 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="44e089bc1369f6c1a4bcd915248cba1eac03bcf6058f0b48a934fd3e6a73f37a" cmd=["/bin/bash","-c","test -f /ready/ready"] Oct 01 15:05:57 crc kubenswrapper[4869]: E1001 15:05:57.897927 4869 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="44e089bc1369f6c1a4bcd915248cba1eac03bcf6058f0b48a934fd3e6a73f37a" cmd=["/bin/bash","-c","test -f /ready/ready"] Oct 01 15:05:57 crc kubenswrapper[4869]: E1001 15:05:57.897959 4869 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openshift-multus/cni-sysctl-allowlist-ds-xffhm" podUID="9ff567e5-6f6b-4b25-a8a0-3aa792f4291d" containerName="kube-multus-additional-cni-plugins" Oct 01 15:06:04 crc kubenswrapper[4869]: I1001 15:06:04.327756 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_cni-sysctl-allowlist-ds-xffhm_9ff567e5-6f6b-4b25-a8a0-3aa792f4291d/kube-multus-additional-cni-plugins/0.log" Oct 01 15:06:04 crc kubenswrapper[4869]: I1001 15:06:04.328404 4869 generic.go:334] "Generic (PLEG): container finished" podID="9ff567e5-6f6b-4b25-a8a0-3aa792f4291d" containerID="44e089bc1369f6c1a4bcd915248cba1eac03bcf6058f0b48a934fd3e6a73f37a" exitCode=137 Oct 01 15:06:04 crc kubenswrapper[4869]: I1001 15:06:04.328439 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/cni-sysctl-allowlist-ds-xffhm" event={"ID":"9ff567e5-6f6b-4b25-a8a0-3aa792f4291d","Type":"ContainerDied","Data":"44e089bc1369f6c1a4bcd915248cba1eac03bcf6058f0b48a934fd3e6a73f37a"} Oct 01 15:06:07 crc kubenswrapper[4869]: I1001 15:06:07.543684 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 15:06:07 crc kubenswrapper[4869]: I1001 15:06:07.728460 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-hjpxg" Oct 01 15:06:07 crc kubenswrapper[4869]: E1001 15:06:07.893025 4869 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 44e089bc1369f6c1a4bcd915248cba1eac03bcf6058f0b48a934fd3e6a73f37a is running failed: container process not found" containerID="44e089bc1369f6c1a4bcd915248cba1eac03bcf6058f0b48a934fd3e6a73f37a" 
cmd=["/bin/bash","-c","test -f /ready/ready"] Oct 01 15:06:07 crc kubenswrapper[4869]: E1001 15:06:07.893758 4869 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 44e089bc1369f6c1a4bcd915248cba1eac03bcf6058f0b48a934fd3e6a73f37a is running failed: container process not found" containerID="44e089bc1369f6c1a4bcd915248cba1eac03bcf6058f0b48a934fd3e6a73f37a" cmd=["/bin/bash","-c","test -f /ready/ready"] Oct 01 15:06:07 crc kubenswrapper[4869]: E1001 15:06:07.894317 4869 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 44e089bc1369f6c1a4bcd915248cba1eac03bcf6058f0b48a934fd3e6a73f37a is running failed: container process not found" containerID="44e089bc1369f6c1a4bcd915248cba1eac03bcf6058f0b48a934fd3e6a73f37a" cmd=["/bin/bash","-c","test -f /ready/ready"] Oct 01 15:06:07 crc kubenswrapper[4869]: E1001 15:06:07.894352 4869 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 44e089bc1369f6c1a4bcd915248cba1eac03bcf6058f0b48a934fd3e6a73f37a is running failed: container process not found" probeType="Readiness" pod="openshift-multus/cni-sysctl-allowlist-ds-xffhm" podUID="9ff567e5-6f6b-4b25-a8a0-3aa792f4291d" containerName="kube-multus-additional-cni-plugins" Oct 01 15:06:09 crc kubenswrapper[4869]: E1001 15:06:09.186876 4869 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Oct 01 15:06:09 crc kubenswrapper[4869]: E1001 15:06:09.187069 4869 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-gb7pd,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-qf2zt_openshift-marketplace(87c849d7-d613-446d-9f2f-bdcf6da7e4e6): ErrImagePull: rpc error: code = Canceled desc = 
copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 01 15:06:09 crc kubenswrapper[4869]: E1001 15:06:09.188531 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-qf2zt" podUID="87c849d7-d613-446d-9f2f-bdcf6da7e4e6" Oct 01 15:06:10 crc kubenswrapper[4869]: E1001 15:06:10.577659 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-qf2zt" podUID="87c849d7-d613-446d-9f2f-bdcf6da7e4e6" Oct 01 15:06:10 crc kubenswrapper[4869]: E1001 15:06:10.680436 4869 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Oct 01 15:06:10 crc kubenswrapper[4869]: E1001 15:06:10.681389 4869 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-9jgjl,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-g22pf_openshift-marketplace(61d7fda4-0848-4beb-b7ad-7c361fe37595): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 01 15:06:10 crc kubenswrapper[4869]: E1001 15:06:10.682669 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-g22pf" podUID="61d7fda4-0848-4beb-b7ad-7c361fe37595" Oct 01 15:06:11 crc 
kubenswrapper[4869]: E1001 15:06:11.148657 4869 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Oct 01 15:06:11 crc kubenswrapper[4869]: E1001 15:06:11.148844 4869 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-szl8n,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-q7dfv_openshift-marketplace(08693370-8f83-4bb1-8e2f-83a2ece2c9dd): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 01 15:06:11 crc kubenswrapper[4869]: E1001 15:06:11.150076 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-q7dfv" podUID="08693370-8f83-4bb1-8e2f-83a2ece2c9dd" Oct 01 15:06:11 crc kubenswrapper[4869]: I1001 15:06:11.599062 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Oct 01 15:06:11 crc kubenswrapper[4869]: I1001 15:06:11.703828 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Oct 01 15:06:14 crc kubenswrapper[4869]: E1001 15:06:14.119236 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-g22pf" podUID="61d7fda4-0848-4beb-b7ad-7c361fe37595" Oct 01 15:06:14 crc kubenswrapper[4869]: E1001 15:06:14.119242 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image 
\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-q7dfv" podUID="08693370-8f83-4bb1-8e2f-83a2ece2c9dd" Oct 01 15:06:16 crc kubenswrapper[4869]: E1001 15:06:16.077799 4869 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Oct 01 15:06:16 crc kubenswrapper[4869]: E1001 15:06:16.078332 4869 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-h2sfg,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-xvckx_openshift-marketplace(3d37704b-7106-4dcc-b91a-1de81e03d6a9): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 01 15:06:16 crc kubenswrapper[4869]: E1001 15:06:16.080037 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-xvckx" podUID="3d37704b-7106-4dcc-b91a-1de81e03d6a9" Oct 01 15:06:16 crc kubenswrapper[4869]: I1001 15:06:16.088111 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_cni-sysctl-allowlist-ds-xffhm_9ff567e5-6f6b-4b25-a8a0-3aa792f4291d/kube-multus-additional-cni-plugins/0.log" Oct 01 15:06:16 crc kubenswrapper[4869]: I1001 15:06:16.088185 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/cni-sysctl-allowlist-ds-xffhm" Oct 01 15:06:16 crc kubenswrapper[4869]: I1001 15:06:16.107680 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=5.107657232 podStartE2EDuration="5.107657232s" podCreationTimestamp="2025-10-01 15:06:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:06:16.101296884 +0000 UTC m=+85.248140010" watchObservedRunningTime="2025-10-01 15:06:16.107657232 +0000 UTC m=+85.254500368" Oct 01 15:06:16 crc kubenswrapper[4869]: E1001 15:06:16.144052 4869 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Oct 01 15:06:16 crc kubenswrapper[4869]: E1001 15:06:16.144254 4869 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-9956d,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-qxb5h_openshift-marketplace(a0c02ede-8eef-4d4e-a277-bcda77228c29): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 01 15:06:16 crc kubenswrapper[4869]: E1001 15:06:16.151782 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-qxb5h" podUID="a0c02ede-8eef-4d4e-a277-bcda77228c29" Oct 01 15:06:16 crc kubenswrapper[4869]: E1001 15:06:16.164870 4869 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" 
image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Oct 01 15:06:16 crc kubenswrapper[4869]: E1001 15:06:16.165479 4869 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-f877n,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-bcf4v_openshift-marketplace(1b0f8e72-9451-4c28-8e39-9e8c94096b80): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 01 15:06:16 crc kubenswrapper[4869]: E1001 15:06:16.166967 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-bcf4v" podUID="1b0f8e72-9451-4c28-8e39-9e8c94096b80" Oct 01 15:06:16 crc kubenswrapper[4869]: I1001 15:06:16.176977 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=5.176948457 podStartE2EDuration="5.176948457s" podCreationTimestamp="2025-10-01 15:06:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:06:16.164661022 +0000 UTC m=+85.311504168" watchObservedRunningTime="2025-10-01 15:06:16.176948457 +0000 UTC m=+85.323791563" Oct 01 15:06:16 crc kubenswrapper[4869]: E1001 15:06:16.206275 4869 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Oct 01 15:06:16 crc kubenswrapper[4869]: E1001 15:06:16.206418 4869 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs 
--catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-5b7pk,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-mv2bb_openshift-marketplace(c1e7add4-dd2c-47c1-afee-e8dc937be3a8): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 01 15:06:16 crc kubenswrapper[4869]: E1001 15:06:16.208006 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-mv2bb" podUID="c1e7add4-dd2c-47c1-afee-e8dc937be3a8" Oct 01 15:06:16 crc kubenswrapper[4869]: I1001 15:06:16.234602 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ready\" (UniqueName: \"kubernetes.io/empty-dir/9ff567e5-6f6b-4b25-a8a0-3aa792f4291d-ready\") pod \"9ff567e5-6f6b-4b25-a8a0-3aa792f4291d\" (UID: \"9ff567e5-6f6b-4b25-a8a0-3aa792f4291d\") " Oct 01 15:06:16 crc kubenswrapper[4869]: I1001 15:06:16.234646 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/9ff567e5-6f6b-4b25-a8a0-3aa792f4291d-cni-sysctl-allowlist\") pod \"9ff567e5-6f6b-4b25-a8a0-3aa792f4291d\" (UID: \"9ff567e5-6f6b-4b25-a8a0-3aa792f4291d\") " Oct 01 15:06:16 crc kubenswrapper[4869]: I1001 15:06:16.234672 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gltw6\" (UniqueName: \"kubernetes.io/projected/9ff567e5-6f6b-4b25-a8a0-3aa792f4291d-kube-api-access-gltw6\") pod \"9ff567e5-6f6b-4b25-a8a0-3aa792f4291d\" (UID: \"9ff567e5-6f6b-4b25-a8a0-3aa792f4291d\") " Oct 01 15:06:16 crc kubenswrapper[4869]: I1001 15:06:16.234759 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/9ff567e5-6f6b-4b25-a8a0-3aa792f4291d-tuning-conf-dir\") pod \"9ff567e5-6f6b-4b25-a8a0-3aa792f4291d\" (UID: \"9ff567e5-6f6b-4b25-a8a0-3aa792f4291d\") " Oct 01 15:06:16 crc kubenswrapper[4869]: I1001 15:06:16.235009 4869 operation_generator.go:803] 
UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9ff567e5-6f6b-4b25-a8a0-3aa792f4291d-tuning-conf-dir" (OuterVolumeSpecName: "tuning-conf-dir") pod "9ff567e5-6f6b-4b25-a8a0-3aa792f4291d" (UID: "9ff567e5-6f6b-4b25-a8a0-3aa792f4291d"). InnerVolumeSpecName "tuning-conf-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 15:06:16 crc kubenswrapper[4869]: I1001 15:06:16.235167 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9ff567e5-6f6b-4b25-a8a0-3aa792f4291d-ready" (OuterVolumeSpecName: "ready") pod "9ff567e5-6f6b-4b25-a8a0-3aa792f4291d" (UID: "9ff567e5-6f6b-4b25-a8a0-3aa792f4291d"). InnerVolumeSpecName "ready". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 15:06:16 crc kubenswrapper[4869]: I1001 15:06:16.236144 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9ff567e5-6f6b-4b25-a8a0-3aa792f4291d-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "9ff567e5-6f6b-4b25-a8a0-3aa792f4291d" (UID: "9ff567e5-6f6b-4b25-a8a0-3aa792f4291d"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:06:16 crc kubenswrapper[4869]: I1001 15:06:16.248405 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9ff567e5-6f6b-4b25-a8a0-3aa792f4291d-kube-api-access-gltw6" (OuterVolumeSpecName: "kube-api-access-gltw6") pod "9ff567e5-6f6b-4b25-a8a0-3aa792f4291d" (UID: "9ff567e5-6f6b-4b25-a8a0-3aa792f4291d"). InnerVolumeSpecName "kube-api-access-gltw6". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:06:16 crc kubenswrapper[4869]: I1001 15:06:16.267239 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Oct 01 15:06:16 crc kubenswrapper[4869]: E1001 15:06:16.267390 4869 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Oct 01 15:06:16 crc kubenswrapper[4869]: E1001 15:06:16.267517 4869 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-vbv92,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-262k2_openshift-marketplace(f6f8609b-7f80-4b8f-a371-bb1d10396a15): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 01 15:06:16 crc kubenswrapper[4869]: E1001 15:06:16.268687 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-262k2" podUID="f6f8609b-7f80-4b8f-a371-bb1d10396a15" Oct 01 15:06:16 crc kubenswrapper[4869]: I1001 15:06:16.336293 4869 reconciler_common.go:293] "Volume detached for volume \"ready\" (UniqueName: \"kubernetes.io/empty-dir/9ff567e5-6f6b-4b25-a8a0-3aa792f4291d-ready\") on node \"crc\" DevicePath \"\"" Oct 01 15:06:16 crc kubenswrapper[4869]: I1001 15:06:16.336319 4869 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/9ff567e5-6f6b-4b25-a8a0-3aa792f4291d-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Oct 01 15:06:16 crc kubenswrapper[4869]: I1001 15:06:16.336332 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gltw6\" (UniqueName: \"kubernetes.io/projected/9ff567e5-6f6b-4b25-a8a0-3aa792f4291d-kube-api-access-gltw6\") on node \"crc\" DevicePath \"\"" Oct 01 15:06:16 crc kubenswrapper[4869]: I1001 15:06:16.336342 4869 reconciler_common.go:293] "Volume detached for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/9ff567e5-6f6b-4b25-a8a0-3aa792f4291d-tuning-conf-dir\") on node \"crc\" DevicePath \"\"" Oct 01 15:06:16 crc kubenswrapper[4869]: I1001 15:06:16.395474 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"03706444-4f26-489d-a0af-457d9bbaec6c","Type":"ContainerStarted","Data":"95885671dd5abc4eb8e3eca24b3685818ce6e94fe5ef409b836efa74c39d0104"} Oct 01 15:06:16 crc kubenswrapper[4869]: I1001 15:06:16.397374 4869 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-multus_cni-sysctl-allowlist-ds-xffhm_9ff567e5-6f6b-4b25-a8a0-3aa792f4291d/kube-multus-additional-cni-plugins/0.log" Oct 01 15:06:16 crc kubenswrapper[4869]: I1001 15:06:16.397639 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/cni-sysctl-allowlist-ds-xffhm" event={"ID":"9ff567e5-6f6b-4b25-a8a0-3aa792f4291d","Type":"ContainerDied","Data":"2cc02681b4c213008c8b3fc78fd27a872c10a9029e60ebbb3680e6c52757aae4"} Oct 01 15:06:16 crc kubenswrapper[4869]: I1001 15:06:16.397712 4869 scope.go:117] "RemoveContainer" containerID="44e089bc1369f6c1a4bcd915248cba1eac03bcf6058f0b48a934fd3e6a73f37a" Oct 01 15:06:16 crc kubenswrapper[4869]: I1001 15:06:16.397897 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-multus/cni-sysctl-allowlist-ds-xffhm" Oct 01 15:06:16 crc kubenswrapper[4869]: E1001 15:06:16.400971 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-bcf4v" podUID="1b0f8e72-9451-4c28-8e39-9e8c94096b80" Oct 01 15:06:16 crc kubenswrapper[4869]: E1001 15:06:16.401157 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-262k2" podUID="f6f8609b-7f80-4b8f-a371-bb1d10396a15" Oct 01 15:06:16 crc kubenswrapper[4869]: E1001 15:06:16.401195 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-xvckx" podUID="3d37704b-7106-4dcc-b91a-1de81e03d6a9" Oct 01 15:06:16 crc kubenswrapper[4869]: E1001 15:06:16.401279 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-qxb5h" podUID="a0c02ede-8eef-4d4e-a277-bcda77228c29" Oct 01 15:06:16 crc kubenswrapper[4869]: E1001 15:06:16.403459 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-mv2bb" podUID="c1e7add4-dd2c-47c1-afee-e8dc937be3a8" Oct 01 15:06:16 crc kubenswrapper[4869]: I1001 15:06:16.506241 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-multus/cni-sysctl-allowlist-ds-xffhm"] Oct 01 15:06:16 crc kubenswrapper[4869]: I1001 15:06:16.508789 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-multus/cni-sysctl-allowlist-ds-xffhm"] Oct 01 15:06:17 crc kubenswrapper[4869]: I1001 15:06:17.407895 4869 generic.go:334] "Generic (PLEG): container finished" podID="03706444-4f26-489d-a0af-457d9bbaec6c" containerID="f73859d10ba38a37424ebdc05c2e4d892b65e3793996abf296a22414b1ec3a83" exitCode=0 Oct 01 15:06:17 crc kubenswrapper[4869]: I1001 15:06:17.407956 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"03706444-4f26-489d-a0af-457d9bbaec6c","Type":"ContainerDied","Data":"f73859d10ba38a37424ebdc05c2e4d892b65e3793996abf296a22414b1ec3a83"} Oct 01 15:06:17 crc kubenswrapper[4869]: I1001 15:06:17.593091 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9ff567e5-6f6b-4b25-a8a0-3aa792f4291d" path="/var/lib/kubelet/pods/9ff567e5-6f6b-4b25-a8a0-3aa792f4291d/volumes" Oct 01 15:06:18 crc kubenswrapper[4869]: I1001 15:06:18.641072 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 01 15:06:18 crc kubenswrapper[4869]: I1001 15:06:18.766390 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/03706444-4f26-489d-a0af-457d9bbaec6c-kube-api-access\") pod \"03706444-4f26-489d-a0af-457d9bbaec6c\" (UID: \"03706444-4f26-489d-a0af-457d9bbaec6c\") " Oct 01 15:06:18 crc kubenswrapper[4869]: I1001 15:06:18.766475 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/03706444-4f26-489d-a0af-457d9bbaec6c-kubelet-dir\") pod \"03706444-4f26-489d-a0af-457d9bbaec6c\" (UID: \"03706444-4f26-489d-a0af-457d9bbaec6c\") " Oct 01 15:06:18 crc kubenswrapper[4869]: I1001 15:06:18.766814 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/03706444-4f26-489d-a0af-457d9bbaec6c-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "03706444-4f26-489d-a0af-457d9bbaec6c" (UID: "03706444-4f26-489d-a0af-457d9bbaec6c"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 15:06:18 crc kubenswrapper[4869]: I1001 15:06:18.772948 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/03706444-4f26-489d-a0af-457d9bbaec6c-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "03706444-4f26-489d-a0af-457d9bbaec6c" (UID: "03706444-4f26-489d-a0af-457d9bbaec6c"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:06:18 crc kubenswrapper[4869]: I1001 15:06:18.868786 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/03706444-4f26-489d-a0af-457d9bbaec6c-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 01 15:06:18 crc kubenswrapper[4869]: I1001 15:06:18.869193 4869 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/03706444-4f26-489d-a0af-457d9bbaec6c-kubelet-dir\") on node \"crc\" DevicePath \"\"" Oct 01 15:06:19 crc kubenswrapper[4869]: I1001 15:06:19.423807 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"03706444-4f26-489d-a0af-457d9bbaec6c","Type":"ContainerDied","Data":"95885671dd5abc4eb8e3eca24b3685818ce6e94fe5ef409b836efa74c39d0104"} Oct 01 15:06:19 crc kubenswrapper[4869]: I1001 15:06:19.424597 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="95885671dd5abc4eb8e3eca24b3685818ce6e94fe5ef409b836efa74c39d0104" Oct 01 15:06:19 crc kubenswrapper[4869]: I1001 15:06:19.424109 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 01 15:06:24 crc kubenswrapper[4869]: I1001 15:06:24.459359 4869 generic.go:334] "Generic (PLEG): container finished" podID="87c849d7-d613-446d-9f2f-bdcf6da7e4e6" containerID="33f28d08c64f57316452d07d2498ce606408877e0e5441f222fdbd068848b0f3" exitCode=0 Oct 01 15:06:24 crc kubenswrapper[4869]: I1001 15:06:24.459484 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qf2zt" event={"ID":"87c849d7-d613-446d-9f2f-bdcf6da7e4e6","Type":"ContainerDied","Data":"33f28d08c64f57316452d07d2498ce606408877e0e5441f222fdbd068848b0f3"} Oct 01 15:06:25 crc kubenswrapper[4869]: I1001 15:06:25.469572 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qf2zt" event={"ID":"87c849d7-d613-446d-9f2f-bdcf6da7e4e6","Type":"ContainerStarted","Data":"f6a51d050f74f3ce6a16bba6da88dba863231f0f8ac6c9bbea228ebf6335f69e"} Oct 01 15:06:25 crc kubenswrapper[4869]: I1001 15:06:25.486900 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-qf2zt" podStartSLOduration=3.125854376 podStartE2EDuration="54.486880538s" podCreationTimestamp="2025-10-01 15:05:31 +0000 UTC" firstStartedPulling="2025-10-01 15:05:33.837226333 +0000 UTC m=+42.984069449" lastFinishedPulling="2025-10-01 15:06:25.198252495 +0000 UTC m=+94.345095611" observedRunningTime="2025-10-01 15:06:25.484458314 +0000 UTC m=+94.631301500" watchObservedRunningTime="2025-10-01 15:06:25.486880538 +0000 UTC m=+94.633723654" Oct 01 15:06:27 crc kubenswrapper[4869]: I1001 15:06:27.484301 4869 generic.go:334] "Generic (PLEG): container finished" podID="61d7fda4-0848-4beb-b7ad-7c361fe37595" containerID="8dfa2c3c10ab90d6e106f10c56740ecfc2ac5557b6057845c248716809fc105d" exitCode=0 Oct 01 15:06:27 crc kubenswrapper[4869]: I1001 15:06:27.484406 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-g22pf" event={"ID":"61d7fda4-0848-4beb-b7ad-7c361fe37595","Type":"ContainerDied","Data":"8dfa2c3c10ab90d6e106f10c56740ecfc2ac5557b6057845c248716809fc105d"} Oct 01 15:06:27 crc kubenswrapper[4869]: I1001 15:06:27.488910 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-q7dfv" event={"ID":"08693370-8f83-4bb1-8e2f-83a2ece2c9dd","Type":"ContainerStarted","Data":"f4fe48b4ef52a60314d72cf4d691611739b53c01c03b7241e3424a92d02304d5"} Oct 01 15:06:28 crc kubenswrapper[4869]: I1001 15:06:28.501341 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-g22pf" event={"ID":"61d7fda4-0848-4beb-b7ad-7c361fe37595","Type":"ContainerStarted","Data":"f8a1b5b701401e15c467fa2ee7afea4b5dd1cb10531909956f9ee1391dd1b09d"} Oct 01 15:06:28 crc kubenswrapper[4869]: I1001 15:06:28.502872 4869 generic.go:334] "Generic (PLEG): container finished" podID="08693370-8f83-4bb1-8e2f-83a2ece2c9dd" containerID="f4fe48b4ef52a60314d72cf4d691611739b53c01c03b7241e3424a92d02304d5" exitCode=0 Oct 01 15:06:28 crc kubenswrapper[4869]: I1001 15:06:28.502924 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-q7dfv" event={"ID":"08693370-8f83-4bb1-8e2f-83a2ece2c9dd","Type":"ContainerDied","Data":"f4fe48b4ef52a60314d72cf4d691611739b53c01c03b7241e3424a92d02304d5"} Oct 01 15:06:28 crc kubenswrapper[4869]: I1001 15:06:28.526466 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-marketplace/certified-operators-g22pf" podStartSLOduration=3.147084881 podStartE2EDuration="56.526444053s" podCreationTimestamp="2025-10-01 15:05:32 +0000 UTC" firstStartedPulling="2025-10-01 15:05:34.913540997 +0000 UTC m=+44.060384113" lastFinishedPulling="2025-10-01 15:06:28.292900179 +0000 UTC m=+97.439743285" observedRunningTime="2025-10-01 15:06:28.522453948 +0000 UTC m=+97.669297074" watchObservedRunningTime="2025-10-01 15:06:28.526444053 +0000 UTC m=+97.673287179" Oct 01 15:06:29 crc kubenswrapper[4869]: I1001 15:06:29.510997 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-q7dfv" event={"ID":"08693370-8f83-4bb1-8e2f-83a2ece2c9dd","Type":"ContainerStarted","Data":"c47ec48492ee886f60c92fd628e357d590889a5b0a6424d7d525ea44a9e25eec"} Oct 01 15:06:29 crc kubenswrapper[4869]: I1001 15:06:29.537566 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-q7dfv" podStartSLOduration=3.372196721 podStartE2EDuration="57.537542377s" podCreationTimestamp="2025-10-01 15:05:32 +0000 UTC" firstStartedPulling="2025-10-01 15:05:34.87064431 +0000 UTC m=+44.017487426" lastFinishedPulling="2025-10-01 15:06:29.035989926 +0000 UTC m=+98.182833082" observedRunningTime="2025-10-01 15:06:29.533048148 +0000 UTC m=+98.679891284" watchObservedRunningTime="2025-10-01 15:06:29.537542377 +0000 UTC m=+98.684385533" Oct 01 15:06:30 crc kubenswrapper[4869]: I1001 15:06:30.516683 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bcf4v" event={"ID":"1b0f8e72-9451-4c28-8e39-9e8c94096b80","Type":"ContainerStarted","Data":"0334bf3a4f0053cd94ddaeedb5dc71b8dcd4a3eafafdd7eb97ebe43d07d018a8"} Oct 01 15:06:30 crc kubenswrapper[4869]: I1001 15:06:30.519480 4869 generic.go:334] "Generic (PLEG): container finished" podID="c1e7add4-dd2c-47c1-afee-e8dc937be3a8" containerID="8a3cd0218ac6dfbb2e1f3dfffe49531c5fc8290e5b3a3e592f94d331f13e0323" exitCode=0 Oct 01 15:06:30 crc kubenswrapper[4869]: I1001 15:06:30.519522 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mv2bb" event={"ID":"c1e7add4-dd2c-47c1-afee-e8dc937be3a8","Type":"ContainerDied","Data":"8a3cd0218ac6dfbb2e1f3dfffe49531c5fc8290e5b3a3e592f94d331f13e0323"} Oct 01 15:06:30 crc kubenswrapper[4869]: I1001 15:06:30.522002 4869 generic.go:334] "Generic (PLEG): container finished" podID="a0c02ede-8eef-4d4e-a277-bcda77228c29" containerID="f4b27a68a711919f69e81ca860f0ea7de8402c9fe9a36e389152b1b2a2536b33" exitCode=0 Oct 01 15:06:30 crc kubenswrapper[4869]: I1001 15:06:30.522023 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qxb5h" event={"ID":"a0c02ede-8eef-4d4e-a277-bcda77228c29","Type":"ContainerDied","Data":"f4b27a68a711919f69e81ca860f0ea7de8402c9fe9a36e389152b1b2a2536b33"} Oct 01 15:06:31 crc kubenswrapper[4869]: I1001 15:06:31.530078 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-262k2" event={"ID":"f6f8609b-7f80-4b8f-a371-bb1d10396a15","Type":"ContainerStarted","Data":"70aa4e4d52e780ca00904740e4605f094164fc76a86417afff954c1f8ece8cd6"} Oct 01 15:06:31 crc kubenswrapper[4869]: I1001 15:06:31.533046 4869 generic.go:334] "Generic (PLEG): container finished" podID="1b0f8e72-9451-4c28-8e39-9e8c94096b80" containerID="0334bf3a4f0053cd94ddaeedb5dc71b8dcd4a3eafafdd7eb97ebe43d07d018a8" exitCode=0 Oct 01 15:06:31 crc kubenswrapper[4869]: I1001 
15:06:31.533321 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bcf4v" event={"ID":"1b0f8e72-9451-4c28-8e39-9e8c94096b80","Type":"ContainerDied","Data":"0334bf3a4f0053cd94ddaeedb5dc71b8dcd4a3eafafdd7eb97ebe43d07d018a8"} Oct 01 15:06:31 crc kubenswrapper[4869]: I1001 15:06:31.537959 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mv2bb" event={"ID":"c1e7add4-dd2c-47c1-afee-e8dc937be3a8","Type":"ContainerStarted","Data":"f1064283a9baa5ac5011d25c2f16f3c72db86062f6499aa01fbc6ea6a1789b38"} Oct 01 15:06:31 crc kubenswrapper[4869]: I1001 15:06:31.540410 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qxb5h" event={"ID":"a0c02ede-8eef-4d4e-a277-bcda77228c29","Type":"ContainerStarted","Data":"20b4aff429cc480e195971cc9740c76b6db63e74a008801d284cc9104977abdc"} Oct 01 15:06:31 crc kubenswrapper[4869]: I1001 15:06:31.596065 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-qxb5h" podStartSLOduration=2.514306069 podStartE2EDuration="57.596047494s" podCreationTimestamp="2025-10-01 15:05:34 +0000 UTC" firstStartedPulling="2025-10-01 15:05:35.918599765 +0000 UTC m=+45.065442881" lastFinishedPulling="2025-10-01 15:06:31.00034112 +0000 UTC m=+100.147184306" observedRunningTime="2025-10-01 15:06:31.594110113 +0000 UTC m=+100.740953229" watchObservedRunningTime="2025-10-01 15:06:31.596047494 +0000 UTC m=+100.742890610" Oct 01 15:06:31 crc kubenswrapper[4869]: I1001 15:06:31.616067 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-mv2bb" podStartSLOduration=3.5336679909999997 podStartE2EDuration="56.616051824s" podCreationTimestamp="2025-10-01 15:05:35 +0000 UTC" firstStartedPulling="2025-10-01 15:05:38.044085077 +0000 UTC m=+47.190928193" lastFinishedPulling="2025-10-01 15:06:31.1264689 +0000 UTC m=+100.273312026" observedRunningTime="2025-10-01 15:06:31.614959335 +0000 UTC m=+100.761802461" watchObservedRunningTime="2025-10-01 15:06:31.616051824 +0000 UTC m=+100.762894940" Oct 01 15:06:32 crc kubenswrapper[4869]: I1001 15:06:32.245752 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-qf2zt" Oct 01 15:06:32 crc kubenswrapper[4869]: I1001 15:06:32.245806 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-qf2zt" Oct 01 15:06:32 crc kubenswrapper[4869]: I1001 15:06:32.454180 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-qf2zt" Oct 01 15:06:32 crc kubenswrapper[4869]: I1001 15:06:32.549857 4869 generic.go:334] "Generic (PLEG): container finished" podID="f6f8609b-7f80-4b8f-a371-bb1d10396a15" containerID="70aa4e4d52e780ca00904740e4605f094164fc76a86417afff954c1f8ece8cd6" exitCode=0 Oct 01 15:06:32 crc kubenswrapper[4869]: I1001 15:06:32.549940 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-262k2" event={"ID":"f6f8609b-7f80-4b8f-a371-bb1d10396a15","Type":"ContainerDied","Data":"70aa4e4d52e780ca00904740e4605f094164fc76a86417afff954c1f8ece8cd6"} Oct 01 15:06:32 crc kubenswrapper[4869]: I1001 15:06:32.554257 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bcf4v" 
event={"ID":"1b0f8e72-9451-4c28-8e39-9e8c94096b80","Type":"ContainerStarted","Data":"07720c3cd6704525796dcb655526f3093a28bf2533a62fd574ad80e771ef0ce5"} Oct 01 15:06:32 crc kubenswrapper[4869]: I1001 15:06:32.588585 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-bcf4v" podStartSLOduration=2.443998702 podStartE2EDuration="59.588564685s" podCreationTimestamp="2025-10-01 15:05:33 +0000 UTC" firstStartedPulling="2025-10-01 15:05:34.868460922 +0000 UTC m=+44.015304038" lastFinishedPulling="2025-10-01 15:06:32.013026905 +0000 UTC m=+101.159870021" observedRunningTime="2025-10-01 15:06:32.584542879 +0000 UTC m=+101.731386025" watchObservedRunningTime="2025-10-01 15:06:32.588564685 +0000 UTC m=+101.735407801" Oct 01 15:06:32 crc kubenswrapper[4869]: I1001 15:06:32.615037 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-qf2zt" Oct 01 15:06:32 crc kubenswrapper[4869]: I1001 15:06:32.722088 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-q7dfv" Oct 01 15:06:32 crc kubenswrapper[4869]: I1001 15:06:32.722125 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-q7dfv" Oct 01 15:06:32 crc kubenswrapper[4869]: I1001 15:06:32.758095 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-q7dfv" Oct 01 15:06:32 crc kubenswrapper[4869]: I1001 15:06:32.823113 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-g22pf" Oct 01 15:06:32 crc kubenswrapper[4869]: I1001 15:06:32.823154 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-g22pf" Oct 01 15:06:32 crc kubenswrapper[4869]: I1001 15:06:32.876082 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-g22pf" Oct 01 15:06:33 crc kubenswrapper[4869]: I1001 15:06:33.561775 4869 generic.go:334] "Generic (PLEG): container finished" podID="3d37704b-7106-4dcc-b91a-1de81e03d6a9" containerID="a6ffeaa91d7cfa19714dbc69460541e49602f320ca1a1049a4f9936e9705d9cc" exitCode=0 Oct 01 15:06:33 crc kubenswrapper[4869]: I1001 15:06:33.561887 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xvckx" event={"ID":"3d37704b-7106-4dcc-b91a-1de81e03d6a9","Type":"ContainerDied","Data":"a6ffeaa91d7cfa19714dbc69460541e49602f320ca1a1049a4f9936e9705d9cc"} Oct 01 15:06:33 crc kubenswrapper[4869]: I1001 15:06:33.565382 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-262k2" event={"ID":"f6f8609b-7f80-4b8f-a371-bb1d10396a15","Type":"ContainerStarted","Data":"eeab3bcbb7bf7e2eca7c12d433f5c81e2680737975733fdcd5ec24c251ff68bd"} Oct 01 15:06:33 crc kubenswrapper[4869]: I1001 15:06:33.596470 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-262k2" podStartSLOduration=3.338207906 podStartE2EDuration="1m1.596449302s" podCreationTimestamp="2025-10-01 15:05:32 +0000 UTC" firstStartedPulling="2025-10-01 15:05:34.858752322 +0000 UTC m=+44.005595438" lastFinishedPulling="2025-10-01 15:06:33.116993708 +0000 UTC m=+102.263836834" observedRunningTime="2025-10-01 15:06:33.595578979 +0000 UTC 
m=+102.742422095" watchObservedRunningTime="2025-10-01 15:06:33.596449302 +0000 UTC m=+102.743292418" Oct 01 15:06:33 crc kubenswrapper[4869]: I1001 15:06:33.622931 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-g22pf" Oct 01 15:06:34 crc kubenswrapper[4869]: I1001 15:06:34.320315 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-bcf4v" Oct 01 15:06:34 crc kubenswrapper[4869]: I1001 15:06:34.320663 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-bcf4v" Oct 01 15:06:34 crc kubenswrapper[4869]: I1001 15:06:34.369098 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-bcf4v" Oct 01 15:06:34 crc kubenswrapper[4869]: I1001 15:06:34.580468 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-qxb5h" Oct 01 15:06:34 crc kubenswrapper[4869]: I1001 15:06:34.580520 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-qxb5h" Oct 01 15:06:34 crc kubenswrapper[4869]: I1001 15:06:34.583623 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xvckx" event={"ID":"3d37704b-7106-4dcc-b91a-1de81e03d6a9","Type":"ContainerStarted","Data":"cfdfde00cc688d2f87dc9212c3c234cbe19f52fb5bc46b6aaf3b58cf03db6930"} Oct 01 15:06:34 crc kubenswrapper[4869]: I1001 15:06:34.650677 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-qxb5h" Oct 01 15:06:34 crc kubenswrapper[4869]: I1001 15:06:34.672847 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-xvckx" podStartSLOduration=2.626153506 podStartE2EDuration="59.672822434s" podCreationTimestamp="2025-10-01 15:05:35 +0000 UTC" firstStartedPulling="2025-10-01 15:05:36.964992039 +0000 UTC m=+46.111835155" lastFinishedPulling="2025-10-01 15:06:34.011660957 +0000 UTC m=+103.158504083" observedRunningTime="2025-10-01 15:06:34.603021346 +0000 UTC m=+103.749864462" watchObservedRunningTime="2025-10-01 15:06:34.672822434 +0000 UTC m=+103.819665610" Oct 01 15:06:34 crc kubenswrapper[4869]: I1001 15:06:34.904601 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-g22pf"] Oct 01 15:06:35 crc kubenswrapper[4869]: I1001 15:06:35.589988 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-g22pf" podUID="61d7fda4-0848-4beb-b7ad-7c361fe37595" containerName="registry-server" containerID="cri-o://f8a1b5b701401e15c467fa2ee7afea4b5dd1cb10531909956f9ee1391dd1b09d" gracePeriod=2 Oct 01 15:06:35 crc kubenswrapper[4869]: I1001 15:06:35.602957 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-xvckx" Oct 01 15:06:35 crc kubenswrapper[4869]: I1001 15:06:35.603148 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-xvckx" Oct 01 15:06:36 crc kubenswrapper[4869]: I1001 15:06:36.017632 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-mv2bb" Oct 01 15:06:36 crc kubenswrapper[4869]: I1001 15:06:36.017828 4869 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-mv2bb" Oct 01 15:06:36 crc kubenswrapper[4869]: I1001 15:06:36.598743 4869 generic.go:334] "Generic (PLEG): container finished" podID="61d7fda4-0848-4beb-b7ad-7c361fe37595" containerID="f8a1b5b701401e15c467fa2ee7afea4b5dd1cb10531909956f9ee1391dd1b09d" exitCode=0 Oct 01 15:06:36 crc kubenswrapper[4869]: I1001 15:06:36.598849 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-g22pf" event={"ID":"61d7fda4-0848-4beb-b7ad-7c361fe37595","Type":"ContainerDied","Data":"f8a1b5b701401e15c467fa2ee7afea4b5dd1cb10531909956f9ee1391dd1b09d"} Oct 01 15:06:36 crc kubenswrapper[4869]: I1001 15:06:36.644388 4869 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-xvckx" podUID="3d37704b-7106-4dcc-b91a-1de81e03d6a9" containerName="registry-server" probeResult="failure" output=< Oct 01 15:06:36 crc kubenswrapper[4869]: timeout: failed to connect service ":50051" within 1s Oct 01 15:06:36 crc kubenswrapper[4869]: > Oct 01 15:06:37 crc kubenswrapper[4869]: I1001 15:06:37.080303 4869 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-mv2bb" podUID="c1e7add4-dd2c-47c1-afee-e8dc937be3a8" containerName="registry-server" probeResult="failure" output=< Oct 01 15:06:37 crc kubenswrapper[4869]: timeout: failed to connect service ":50051" within 1s Oct 01 15:06:37 crc kubenswrapper[4869]: > Oct 01 15:06:37 crc kubenswrapper[4869]: I1001 15:06:37.256153 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-g22pf" Oct 01 15:06:37 crc kubenswrapper[4869]: I1001 15:06:37.337515 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/61d7fda4-0848-4beb-b7ad-7c361fe37595-utilities\") pod \"61d7fda4-0848-4beb-b7ad-7c361fe37595\" (UID: \"61d7fda4-0848-4beb-b7ad-7c361fe37595\") " Oct 01 15:06:37 crc kubenswrapper[4869]: I1001 15:06:37.337606 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9jgjl\" (UniqueName: \"kubernetes.io/projected/61d7fda4-0848-4beb-b7ad-7c361fe37595-kube-api-access-9jgjl\") pod \"61d7fda4-0848-4beb-b7ad-7c361fe37595\" (UID: \"61d7fda4-0848-4beb-b7ad-7c361fe37595\") " Oct 01 15:06:37 crc kubenswrapper[4869]: I1001 15:06:37.337716 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/61d7fda4-0848-4beb-b7ad-7c361fe37595-catalog-content\") pod \"61d7fda4-0848-4beb-b7ad-7c361fe37595\" (UID: \"61d7fda4-0848-4beb-b7ad-7c361fe37595\") " Oct 01 15:06:37 crc kubenswrapper[4869]: I1001 15:06:37.338523 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/61d7fda4-0848-4beb-b7ad-7c361fe37595-utilities" (OuterVolumeSpecName: "utilities") pod "61d7fda4-0848-4beb-b7ad-7c361fe37595" (UID: "61d7fda4-0848-4beb-b7ad-7c361fe37595"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 15:06:37 crc kubenswrapper[4869]: I1001 15:06:37.344059 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/61d7fda4-0848-4beb-b7ad-7c361fe37595-kube-api-access-9jgjl" (OuterVolumeSpecName: "kube-api-access-9jgjl") pod "61d7fda4-0848-4beb-b7ad-7c361fe37595" (UID: "61d7fda4-0848-4beb-b7ad-7c361fe37595"). InnerVolumeSpecName "kube-api-access-9jgjl". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:06:37 crc kubenswrapper[4869]: I1001 15:06:37.385643 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/61d7fda4-0848-4beb-b7ad-7c361fe37595-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "61d7fda4-0848-4beb-b7ad-7c361fe37595" (UID: "61d7fda4-0848-4beb-b7ad-7c361fe37595"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 15:06:37 crc kubenswrapper[4869]: I1001 15:06:37.439136 4869 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/61d7fda4-0848-4beb-b7ad-7c361fe37595-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 15:06:37 crc kubenswrapper[4869]: I1001 15:06:37.439413 4869 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/61d7fda4-0848-4beb-b7ad-7c361fe37595-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 15:06:37 crc kubenswrapper[4869]: I1001 15:06:37.439496 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9jgjl\" (UniqueName: \"kubernetes.io/projected/61d7fda4-0848-4beb-b7ad-7c361fe37595-kube-api-access-9jgjl\") on node \"crc\" DevicePath \"\"" Oct 01 15:06:37 crc kubenswrapper[4869]: I1001 15:06:37.606235 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-g22pf" event={"ID":"61d7fda4-0848-4beb-b7ad-7c361fe37595","Type":"ContainerDied","Data":"d11b99210822f3cb9bb34f1bc54c4cb7e1fc623f237b268995131f5f02627068"} Oct 01 15:06:37 crc kubenswrapper[4869]: I1001 15:06:37.606317 4869 scope.go:117] "RemoveContainer" containerID="f8a1b5b701401e15c467fa2ee7afea4b5dd1cb10531909956f9ee1391dd1b09d" Oct 01 15:06:37 crc kubenswrapper[4869]: I1001 15:06:37.607205 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-g22pf" Oct 01 15:06:37 crc kubenswrapper[4869]: I1001 15:06:37.623760 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-g22pf"] Oct 01 15:06:37 crc kubenswrapper[4869]: I1001 15:06:37.626289 4869 scope.go:117] "RemoveContainer" containerID="8dfa2c3c10ab90d6e106f10c56740ecfc2ac5557b6057845c248716809fc105d" Oct 01 15:06:37 crc kubenswrapper[4869]: I1001 15:06:37.630700 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-g22pf"] Oct 01 15:06:37 crc kubenswrapper[4869]: I1001 15:06:37.646105 4869 scope.go:117] "RemoveContainer" containerID="af0abb9116574ec1f0efff2d221182fa86aaf2f53e7d2328da5a64f71cff4c5c" Oct 01 15:06:39 crc kubenswrapper[4869]: I1001 15:06:39.595168 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="61d7fda4-0848-4beb-b7ad-7c361fe37595" path="/var/lib/kubelet/pods/61d7fda4-0848-4beb-b7ad-7c361fe37595/volumes" Oct 01 15:06:42 crc kubenswrapper[4869]: I1001 15:06:42.542870 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-262k2" Oct 01 15:06:42 crc kubenswrapper[4869]: I1001 15:06:42.543426 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-262k2" Oct 01 15:06:42 crc kubenswrapper[4869]: I1001 15:06:42.590552 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-262k2" Oct 01 15:06:42 crc kubenswrapper[4869]: I1001 15:06:42.671102 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-262k2" Oct 01 15:06:42 crc kubenswrapper[4869]: I1001 15:06:42.773975 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-q7dfv" Oct 01 15:06:43 crc kubenswrapper[4869]: I1001 15:06:43.910578 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-q7dfv"] Oct 01 15:06:43 crc kubenswrapper[4869]: I1001 15:06:43.911473 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-q7dfv" podUID="08693370-8f83-4bb1-8e2f-83a2ece2c9dd" containerName="registry-server" containerID="cri-o://c47ec48492ee886f60c92fd628e357d590889a5b0a6424d7d525ea44a9e25eec" gracePeriod=2 Oct 01 15:06:44 crc kubenswrapper[4869]: I1001 15:06:44.248696 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-q7dfv" Oct 01 15:06:44 crc kubenswrapper[4869]: I1001 15:06:44.359735 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-szl8n\" (UniqueName: \"kubernetes.io/projected/08693370-8f83-4bb1-8e2f-83a2ece2c9dd-kube-api-access-szl8n\") pod \"08693370-8f83-4bb1-8e2f-83a2ece2c9dd\" (UID: \"08693370-8f83-4bb1-8e2f-83a2ece2c9dd\") " Oct 01 15:06:44 crc kubenswrapper[4869]: I1001 15:06:44.359833 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/08693370-8f83-4bb1-8e2f-83a2ece2c9dd-catalog-content\") pod \"08693370-8f83-4bb1-8e2f-83a2ece2c9dd\" (UID: \"08693370-8f83-4bb1-8e2f-83a2ece2c9dd\") " Oct 01 15:06:44 crc kubenswrapper[4869]: I1001 15:06:44.359881 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/08693370-8f83-4bb1-8e2f-83a2ece2c9dd-utilities\") pod \"08693370-8f83-4bb1-8e2f-83a2ece2c9dd\" (UID: \"08693370-8f83-4bb1-8e2f-83a2ece2c9dd\") " Oct 01 15:06:44 crc kubenswrapper[4869]: I1001 15:06:44.360468 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-bcf4v" Oct 01 15:06:44 crc kubenswrapper[4869]: I1001 15:06:44.360914 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/08693370-8f83-4bb1-8e2f-83a2ece2c9dd-utilities" (OuterVolumeSpecName: "utilities") pod "08693370-8f83-4bb1-8e2f-83a2ece2c9dd" (UID: "08693370-8f83-4bb1-8e2f-83a2ece2c9dd"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 15:06:44 crc kubenswrapper[4869]: I1001 15:06:44.371741 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/08693370-8f83-4bb1-8e2f-83a2ece2c9dd-kube-api-access-szl8n" (OuterVolumeSpecName: "kube-api-access-szl8n") pod "08693370-8f83-4bb1-8e2f-83a2ece2c9dd" (UID: "08693370-8f83-4bb1-8e2f-83a2ece2c9dd"). InnerVolumeSpecName "kube-api-access-szl8n". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:06:44 crc kubenswrapper[4869]: I1001 15:06:44.416506 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/08693370-8f83-4bb1-8e2f-83a2ece2c9dd-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "08693370-8f83-4bb1-8e2f-83a2ece2c9dd" (UID: "08693370-8f83-4bb1-8e2f-83a2ece2c9dd"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 15:06:44 crc kubenswrapper[4869]: I1001 15:06:44.460696 4869 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/08693370-8f83-4bb1-8e2f-83a2ece2c9dd-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 15:06:44 crc kubenswrapper[4869]: I1001 15:06:44.460719 4869 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/08693370-8f83-4bb1-8e2f-83a2ece2c9dd-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 15:06:44 crc kubenswrapper[4869]: I1001 15:06:44.460729 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-szl8n\" (UniqueName: \"kubernetes.io/projected/08693370-8f83-4bb1-8e2f-83a2ece2c9dd-kube-api-access-szl8n\") on node \"crc\" DevicePath \"\"" Oct 01 15:06:44 crc kubenswrapper[4869]: I1001 15:06:44.640690 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-qxb5h" Oct 01 15:06:44 crc kubenswrapper[4869]: I1001 15:06:44.652102 4869 generic.go:334] "Generic (PLEG): container finished" podID="08693370-8f83-4bb1-8e2f-83a2ece2c9dd" containerID="c47ec48492ee886f60c92fd628e357d590889a5b0a6424d7d525ea44a9e25eec" exitCode=0 Oct 01 15:06:44 crc kubenswrapper[4869]: I1001 15:06:44.652136 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-q7dfv" event={"ID":"08693370-8f83-4bb1-8e2f-83a2ece2c9dd","Type":"ContainerDied","Data":"c47ec48492ee886f60c92fd628e357d590889a5b0a6424d7d525ea44a9e25eec"} Oct 01 15:06:44 crc kubenswrapper[4869]: I1001 15:06:44.652158 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-q7dfv" event={"ID":"08693370-8f83-4bb1-8e2f-83a2ece2c9dd","Type":"ContainerDied","Data":"efd4f1a81bc09150ba17e6a08e11be307d4cdfb1382a4e746855246f366a804a"} Oct 01 15:06:44 crc kubenswrapper[4869]: I1001 15:06:44.652168 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-q7dfv" Oct 01 15:06:44 crc kubenswrapper[4869]: I1001 15:06:44.652174 4869 scope.go:117] "RemoveContainer" containerID="c47ec48492ee886f60c92fd628e357d590889a5b0a6424d7d525ea44a9e25eec" Oct 01 15:06:44 crc kubenswrapper[4869]: I1001 15:06:44.675858 4869 scope.go:117] "RemoveContainer" containerID="f4fe48b4ef52a60314d72cf4d691611739b53c01c03b7241e3424a92d02304d5" Oct 01 15:06:44 crc kubenswrapper[4869]: I1001 15:06:44.692916 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-q7dfv"] Oct 01 15:06:44 crc kubenswrapper[4869]: I1001 15:06:44.699088 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-q7dfv"] Oct 01 15:06:44 crc kubenswrapper[4869]: I1001 15:06:44.709609 4869 scope.go:117] "RemoveContainer" containerID="d620af3444aac50fc7f4066d147e6e91e320f66965795f87556eaa667a1a0bb6" Oct 01 15:06:44 crc kubenswrapper[4869]: I1001 15:06:44.729814 4869 scope.go:117] "RemoveContainer" containerID="c47ec48492ee886f60c92fd628e357d590889a5b0a6424d7d525ea44a9e25eec" Oct 01 15:06:44 crc kubenswrapper[4869]: E1001 15:06:44.730478 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c47ec48492ee886f60c92fd628e357d590889a5b0a6424d7d525ea44a9e25eec\": container with ID starting with c47ec48492ee886f60c92fd628e357d590889a5b0a6424d7d525ea44a9e25eec not found: ID does not exist" containerID="c47ec48492ee886f60c92fd628e357d590889a5b0a6424d7d525ea44a9e25eec" Oct 01 15:06:44 crc kubenswrapper[4869]: I1001 15:06:44.730530 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c47ec48492ee886f60c92fd628e357d590889a5b0a6424d7d525ea44a9e25eec"} err="failed to get container status \"c47ec48492ee886f60c92fd628e357d590889a5b0a6424d7d525ea44a9e25eec\": rpc error: code = NotFound desc = could not find container \"c47ec48492ee886f60c92fd628e357d590889a5b0a6424d7d525ea44a9e25eec\": container with ID starting with c47ec48492ee886f60c92fd628e357d590889a5b0a6424d7d525ea44a9e25eec not found: ID does not exist" Oct 01 15:06:44 crc kubenswrapper[4869]: I1001 15:06:44.730594 4869 scope.go:117] "RemoveContainer" containerID="f4fe48b4ef52a60314d72cf4d691611739b53c01c03b7241e3424a92d02304d5" Oct 01 15:06:44 crc kubenswrapper[4869]: E1001 15:06:44.730958 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f4fe48b4ef52a60314d72cf4d691611739b53c01c03b7241e3424a92d02304d5\": container with ID starting with f4fe48b4ef52a60314d72cf4d691611739b53c01c03b7241e3424a92d02304d5 not found: ID does not exist" containerID="f4fe48b4ef52a60314d72cf4d691611739b53c01c03b7241e3424a92d02304d5" Oct 01 15:06:44 crc kubenswrapper[4869]: I1001 15:06:44.730978 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f4fe48b4ef52a60314d72cf4d691611739b53c01c03b7241e3424a92d02304d5"} err="failed to get container status \"f4fe48b4ef52a60314d72cf4d691611739b53c01c03b7241e3424a92d02304d5\": rpc error: code = NotFound desc = could not find container \"f4fe48b4ef52a60314d72cf4d691611739b53c01c03b7241e3424a92d02304d5\": container with ID starting with f4fe48b4ef52a60314d72cf4d691611739b53c01c03b7241e3424a92d02304d5 not found: ID does not exist" Oct 01 15:06:44 crc kubenswrapper[4869]: I1001 15:06:44.730992 4869 scope.go:117] "RemoveContainer" 
containerID="d620af3444aac50fc7f4066d147e6e91e320f66965795f87556eaa667a1a0bb6" Oct 01 15:06:44 crc kubenswrapper[4869]: E1001 15:06:44.750799 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d620af3444aac50fc7f4066d147e6e91e320f66965795f87556eaa667a1a0bb6\": container with ID starting with d620af3444aac50fc7f4066d147e6e91e320f66965795f87556eaa667a1a0bb6 not found: ID does not exist" containerID="d620af3444aac50fc7f4066d147e6e91e320f66965795f87556eaa667a1a0bb6" Oct 01 15:06:44 crc kubenswrapper[4869]: I1001 15:06:44.750848 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d620af3444aac50fc7f4066d147e6e91e320f66965795f87556eaa667a1a0bb6"} err="failed to get container status \"d620af3444aac50fc7f4066d147e6e91e320f66965795f87556eaa667a1a0bb6\": rpc error: code = NotFound desc = could not find container \"d620af3444aac50fc7f4066d147e6e91e320f66965795f87556eaa667a1a0bb6\": container with ID starting with d620af3444aac50fc7f4066d147e6e91e320f66965795f87556eaa667a1a0bb6 not found: ID does not exist" Oct 01 15:06:45 crc kubenswrapper[4869]: I1001 15:06:45.589079 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="08693370-8f83-4bb1-8e2f-83a2ece2c9dd" path="/var/lib/kubelet/pods/08693370-8f83-4bb1-8e2f-83a2ece2c9dd/volumes" Oct 01 15:06:45 crc kubenswrapper[4869]: I1001 15:06:45.654893 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-xvckx" Oct 01 15:06:45 crc kubenswrapper[4869]: I1001 15:06:45.699816 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-xvckx" Oct 01 15:06:46 crc kubenswrapper[4869]: I1001 15:06:46.057729 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-mv2bb" Oct 01 15:06:46 crc kubenswrapper[4869]: I1001 15:06:46.123125 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-mv2bb" Oct 01 15:06:46 crc kubenswrapper[4869]: I1001 15:06:46.710217 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-qxb5h"] Oct 01 15:06:46 crc kubenswrapper[4869]: I1001 15:06:46.711003 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-qxb5h" podUID="a0c02ede-8eef-4d4e-a277-bcda77228c29" containerName="registry-server" containerID="cri-o://20b4aff429cc480e195971cc9740c76b6db63e74a008801d284cc9104977abdc" gracePeriod=2 Oct 01 15:06:47 crc kubenswrapper[4869]: I1001 15:06:47.088981 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qxb5h" Oct 01 15:06:47 crc kubenswrapper[4869]: I1001 15:06:47.202753 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a0c02ede-8eef-4d4e-a277-bcda77228c29-catalog-content\") pod \"a0c02ede-8eef-4d4e-a277-bcda77228c29\" (UID: \"a0c02ede-8eef-4d4e-a277-bcda77228c29\") " Oct 01 15:06:47 crc kubenswrapper[4869]: I1001 15:06:47.202990 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a0c02ede-8eef-4d4e-a277-bcda77228c29-utilities\") pod \"a0c02ede-8eef-4d4e-a277-bcda77228c29\" (UID: \"a0c02ede-8eef-4d4e-a277-bcda77228c29\") " Oct 01 15:06:47 crc kubenswrapper[4869]: I1001 15:06:47.203060 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9956d\" (UniqueName: \"kubernetes.io/projected/a0c02ede-8eef-4d4e-a277-bcda77228c29-kube-api-access-9956d\") pod \"a0c02ede-8eef-4d4e-a277-bcda77228c29\" (UID: \"a0c02ede-8eef-4d4e-a277-bcda77228c29\") " Oct 01 15:06:47 crc kubenswrapper[4869]: I1001 15:06:47.203741 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a0c02ede-8eef-4d4e-a277-bcda77228c29-utilities" (OuterVolumeSpecName: "utilities") pod "a0c02ede-8eef-4d4e-a277-bcda77228c29" (UID: "a0c02ede-8eef-4d4e-a277-bcda77228c29"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 15:06:47 crc kubenswrapper[4869]: I1001 15:06:47.211036 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0c02ede-8eef-4d4e-a277-bcda77228c29-kube-api-access-9956d" (OuterVolumeSpecName: "kube-api-access-9956d") pod "a0c02ede-8eef-4d4e-a277-bcda77228c29" (UID: "a0c02ede-8eef-4d4e-a277-bcda77228c29"). InnerVolumeSpecName "kube-api-access-9956d". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:06:47 crc kubenswrapper[4869]: I1001 15:06:47.217094 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a0c02ede-8eef-4d4e-a277-bcda77228c29-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a0c02ede-8eef-4d4e-a277-bcda77228c29" (UID: "a0c02ede-8eef-4d4e-a277-bcda77228c29"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 15:06:47 crc kubenswrapper[4869]: I1001 15:06:47.304418 4869 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a0c02ede-8eef-4d4e-a277-bcda77228c29-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 15:06:47 crc kubenswrapper[4869]: I1001 15:06:47.304447 4869 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a0c02ede-8eef-4d4e-a277-bcda77228c29-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 15:06:47 crc kubenswrapper[4869]: I1001 15:06:47.304457 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9956d\" (UniqueName: \"kubernetes.io/projected/a0c02ede-8eef-4d4e-a277-bcda77228c29-kube-api-access-9956d\") on node \"crc\" DevicePath \"\"" Oct 01 15:06:47 crc kubenswrapper[4869]: I1001 15:06:47.670996 4869 generic.go:334] "Generic (PLEG): container finished" podID="a0c02ede-8eef-4d4e-a277-bcda77228c29" containerID="20b4aff429cc480e195971cc9740c76b6db63e74a008801d284cc9104977abdc" exitCode=0 Oct 01 15:06:47 crc kubenswrapper[4869]: I1001 15:06:47.671036 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qxb5h" event={"ID":"a0c02ede-8eef-4d4e-a277-bcda77228c29","Type":"ContainerDied","Data":"20b4aff429cc480e195971cc9740c76b6db63e74a008801d284cc9104977abdc"} Oct 01 15:06:47 crc kubenswrapper[4869]: I1001 15:06:47.671063 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qxb5h" event={"ID":"a0c02ede-8eef-4d4e-a277-bcda77228c29","Type":"ContainerDied","Data":"80a58603e24d81cbd49e54b75293f38de157c0af3079f1003d293de01a2765b3"} Oct 01 15:06:47 crc kubenswrapper[4869]: I1001 15:06:47.671079 4869 scope.go:117] "RemoveContainer" containerID="20b4aff429cc480e195971cc9740c76b6db63e74a008801d284cc9104977abdc" Oct 01 15:06:47 crc kubenswrapper[4869]: I1001 15:06:47.671108 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qxb5h" Oct 01 15:06:47 crc kubenswrapper[4869]: I1001 15:06:47.688769 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-qxb5h"] Oct 01 15:06:47 crc kubenswrapper[4869]: I1001 15:06:47.691747 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-qxb5h"] Oct 01 15:06:47 crc kubenswrapper[4869]: I1001 15:06:47.695333 4869 scope.go:117] "RemoveContainer" containerID="f4b27a68a711919f69e81ca860f0ea7de8402c9fe9a36e389152b1b2a2536b33" Oct 01 15:06:47 crc kubenswrapper[4869]: I1001 15:06:47.706808 4869 scope.go:117] "RemoveContainer" containerID="462206422cc7124651e76efa953eaca34c538e69cd80c94425a6061bc7b0be0e" Oct 01 15:06:47 crc kubenswrapper[4869]: I1001 15:06:47.727107 4869 scope.go:117] "RemoveContainer" containerID="20b4aff429cc480e195971cc9740c76b6db63e74a008801d284cc9104977abdc" Oct 01 15:06:47 crc kubenswrapper[4869]: E1001 15:06:47.727440 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"20b4aff429cc480e195971cc9740c76b6db63e74a008801d284cc9104977abdc\": container with ID starting with 20b4aff429cc480e195971cc9740c76b6db63e74a008801d284cc9104977abdc not found: ID does not exist" containerID="20b4aff429cc480e195971cc9740c76b6db63e74a008801d284cc9104977abdc" Oct 01 15:06:47 crc kubenswrapper[4869]: I1001 15:06:47.727469 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"20b4aff429cc480e195971cc9740c76b6db63e74a008801d284cc9104977abdc"} err="failed to get container status \"20b4aff429cc480e195971cc9740c76b6db63e74a008801d284cc9104977abdc\": rpc error: code = NotFound desc = could not find container \"20b4aff429cc480e195971cc9740c76b6db63e74a008801d284cc9104977abdc\": container with ID starting with 20b4aff429cc480e195971cc9740c76b6db63e74a008801d284cc9104977abdc not found: ID does not exist" Oct 01 15:06:47 crc kubenswrapper[4869]: I1001 15:06:47.727494 4869 scope.go:117] "RemoveContainer" containerID="f4b27a68a711919f69e81ca860f0ea7de8402c9fe9a36e389152b1b2a2536b33" Oct 01 15:06:47 crc kubenswrapper[4869]: E1001 15:06:47.727826 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f4b27a68a711919f69e81ca860f0ea7de8402c9fe9a36e389152b1b2a2536b33\": container with ID starting with f4b27a68a711919f69e81ca860f0ea7de8402c9fe9a36e389152b1b2a2536b33 not found: ID does not exist" containerID="f4b27a68a711919f69e81ca860f0ea7de8402c9fe9a36e389152b1b2a2536b33" Oct 01 15:06:47 crc kubenswrapper[4869]: I1001 15:06:47.727854 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f4b27a68a711919f69e81ca860f0ea7de8402c9fe9a36e389152b1b2a2536b33"} err="failed to get container status \"f4b27a68a711919f69e81ca860f0ea7de8402c9fe9a36e389152b1b2a2536b33\": rpc error: code = NotFound desc = could not find container \"f4b27a68a711919f69e81ca860f0ea7de8402c9fe9a36e389152b1b2a2536b33\": container with ID starting with f4b27a68a711919f69e81ca860f0ea7de8402c9fe9a36e389152b1b2a2536b33 not found: ID does not exist" Oct 01 15:06:47 crc kubenswrapper[4869]: I1001 15:06:47.727876 4869 scope.go:117] "RemoveContainer" containerID="462206422cc7124651e76efa953eaca34c538e69cd80c94425a6061bc7b0be0e" Oct 01 15:06:47 crc kubenswrapper[4869]: E1001 15:06:47.728071 4869 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"462206422cc7124651e76efa953eaca34c538e69cd80c94425a6061bc7b0be0e\": container with ID starting with 462206422cc7124651e76efa953eaca34c538e69cd80c94425a6061bc7b0be0e not found: ID does not exist" containerID="462206422cc7124651e76efa953eaca34c538e69cd80c94425a6061bc7b0be0e" Oct 01 15:06:47 crc kubenswrapper[4869]: I1001 15:06:47.728093 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"462206422cc7124651e76efa953eaca34c538e69cd80c94425a6061bc7b0be0e"} err="failed to get container status \"462206422cc7124651e76efa953eaca34c538e69cd80c94425a6061bc7b0be0e\": rpc error: code = NotFound desc = could not find container \"462206422cc7124651e76efa953eaca34c538e69cd80c94425a6061bc7b0be0e\": container with ID starting with 462206422cc7124651e76efa953eaca34c538e69cd80c94425a6061bc7b0be0e not found: ID does not exist" Oct 01 15:06:48 crc kubenswrapper[4869]: I1001 15:06:48.974701 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-8mczm"] Oct 01 15:06:49 crc kubenswrapper[4869]: I1001 15:06:49.108781 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-mv2bb"] Oct 01 15:06:49 crc kubenswrapper[4869]: I1001 15:06:49.108979 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-mv2bb" podUID="c1e7add4-dd2c-47c1-afee-e8dc937be3a8" containerName="registry-server" containerID="cri-o://f1064283a9baa5ac5011d25c2f16f3c72db86062f6499aa01fbc6ea6a1789b38" gracePeriod=2 Oct 01 15:06:49 crc kubenswrapper[4869]: I1001 15:06:49.480985 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-mv2bb" Oct 01 15:06:49 crc kubenswrapper[4869]: I1001 15:06:49.587842 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0c02ede-8eef-4d4e-a277-bcda77228c29" path="/var/lib/kubelet/pods/a0c02ede-8eef-4d4e-a277-bcda77228c29/volumes" Oct 01 15:06:49 crc kubenswrapper[4869]: I1001 15:06:49.634123 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c1e7add4-dd2c-47c1-afee-e8dc937be3a8-catalog-content\") pod \"c1e7add4-dd2c-47c1-afee-e8dc937be3a8\" (UID: \"c1e7add4-dd2c-47c1-afee-e8dc937be3a8\") " Oct 01 15:06:49 crc kubenswrapper[4869]: I1001 15:06:49.634230 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c1e7add4-dd2c-47c1-afee-e8dc937be3a8-utilities\") pod \"c1e7add4-dd2c-47c1-afee-e8dc937be3a8\" (UID: \"c1e7add4-dd2c-47c1-afee-e8dc937be3a8\") " Oct 01 15:06:49 crc kubenswrapper[4869]: I1001 15:06:49.634322 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5b7pk\" (UniqueName: \"kubernetes.io/projected/c1e7add4-dd2c-47c1-afee-e8dc937be3a8-kube-api-access-5b7pk\") pod \"c1e7add4-dd2c-47c1-afee-e8dc937be3a8\" (UID: \"c1e7add4-dd2c-47c1-afee-e8dc937be3a8\") " Oct 01 15:06:49 crc kubenswrapper[4869]: I1001 15:06:49.635887 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c1e7add4-dd2c-47c1-afee-e8dc937be3a8-utilities" (OuterVolumeSpecName: "utilities") pod "c1e7add4-dd2c-47c1-afee-e8dc937be3a8" (UID: "c1e7add4-dd2c-47c1-afee-e8dc937be3a8"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 15:06:49 crc kubenswrapper[4869]: I1001 15:06:49.641240 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c1e7add4-dd2c-47c1-afee-e8dc937be3a8-kube-api-access-5b7pk" (OuterVolumeSpecName: "kube-api-access-5b7pk") pod "c1e7add4-dd2c-47c1-afee-e8dc937be3a8" (UID: "c1e7add4-dd2c-47c1-afee-e8dc937be3a8"). InnerVolumeSpecName "kube-api-access-5b7pk". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:06:49 crc kubenswrapper[4869]: I1001 15:06:49.683747 4869 generic.go:334] "Generic (PLEG): container finished" podID="c1e7add4-dd2c-47c1-afee-e8dc937be3a8" containerID="f1064283a9baa5ac5011d25c2f16f3c72db86062f6499aa01fbc6ea6a1789b38" exitCode=0 Oct 01 15:06:49 crc kubenswrapper[4869]: I1001 15:06:49.683791 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mv2bb" event={"ID":"c1e7add4-dd2c-47c1-afee-e8dc937be3a8","Type":"ContainerDied","Data":"f1064283a9baa5ac5011d25c2f16f3c72db86062f6499aa01fbc6ea6a1789b38"} Oct 01 15:06:49 crc kubenswrapper[4869]: I1001 15:06:49.683812 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-mv2bb" Oct 01 15:06:49 crc kubenswrapper[4869]: I1001 15:06:49.683833 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mv2bb" event={"ID":"c1e7add4-dd2c-47c1-afee-e8dc937be3a8","Type":"ContainerDied","Data":"1ab2f152cff2240ed7e93c1fd9cea3d3f76cf6148686c4d2139f9cdf5ed05f16"} Oct 01 15:06:49 crc kubenswrapper[4869]: I1001 15:06:49.683854 4869 scope.go:117] "RemoveContainer" containerID="f1064283a9baa5ac5011d25c2f16f3c72db86062f6499aa01fbc6ea6a1789b38" Oct 01 15:06:49 crc kubenswrapper[4869]: I1001 15:06:49.696349 4869 scope.go:117] "RemoveContainer" containerID="8a3cd0218ac6dfbb2e1f3dfffe49531c5fc8290e5b3a3e592f94d331f13e0323" Oct 01 15:06:49 crc kubenswrapper[4869]: I1001 15:06:49.715801 4869 scope.go:117] "RemoveContainer" containerID="547c7b7716e0a4dd9a9790d183ff1e84e1989a2c0e1aa029ac1339a78be818d2" Oct 01 15:06:49 crc kubenswrapper[4869]: I1001 15:06:49.724744 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c1e7add4-dd2c-47c1-afee-e8dc937be3a8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c1e7add4-dd2c-47c1-afee-e8dc937be3a8" (UID: "c1e7add4-dd2c-47c1-afee-e8dc937be3a8"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 15:06:49 crc kubenswrapper[4869]: I1001 15:06:49.734745 4869 scope.go:117] "RemoveContainer" containerID="f1064283a9baa5ac5011d25c2f16f3c72db86062f6499aa01fbc6ea6a1789b38" Oct 01 15:06:49 crc kubenswrapper[4869]: E1001 15:06:49.735396 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f1064283a9baa5ac5011d25c2f16f3c72db86062f6499aa01fbc6ea6a1789b38\": container with ID starting with f1064283a9baa5ac5011d25c2f16f3c72db86062f6499aa01fbc6ea6a1789b38 not found: ID does not exist" containerID="f1064283a9baa5ac5011d25c2f16f3c72db86062f6499aa01fbc6ea6a1789b38" Oct 01 15:06:49 crc kubenswrapper[4869]: I1001 15:06:49.735430 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f1064283a9baa5ac5011d25c2f16f3c72db86062f6499aa01fbc6ea6a1789b38"} err="failed to get container status \"f1064283a9baa5ac5011d25c2f16f3c72db86062f6499aa01fbc6ea6a1789b38\": rpc error: code = NotFound desc = could not find container \"f1064283a9baa5ac5011d25c2f16f3c72db86062f6499aa01fbc6ea6a1789b38\": container with ID starting with f1064283a9baa5ac5011d25c2f16f3c72db86062f6499aa01fbc6ea6a1789b38 not found: ID does not exist" Oct 01 15:06:49 crc kubenswrapper[4869]: I1001 15:06:49.735450 4869 scope.go:117] "RemoveContainer" containerID="8a3cd0218ac6dfbb2e1f3dfffe49531c5fc8290e5b3a3e592f94d331f13e0323" Oct 01 15:06:49 crc kubenswrapper[4869]: I1001 15:06:49.735524 4869 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c1e7add4-dd2c-47c1-afee-e8dc937be3a8-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 15:06:49 crc kubenswrapper[4869]: I1001 15:06:49.735556 4869 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c1e7add4-dd2c-47c1-afee-e8dc937be3a8-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 15:06:49 crc kubenswrapper[4869]: I1001 15:06:49.735570 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5b7pk\" (UniqueName: \"kubernetes.io/projected/c1e7add4-dd2c-47c1-afee-e8dc937be3a8-kube-api-access-5b7pk\") on node \"crc\" DevicePath \"\"" Oct 01 15:06:49 crc kubenswrapper[4869]: E1001 15:06:49.735904 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8a3cd0218ac6dfbb2e1f3dfffe49531c5fc8290e5b3a3e592f94d331f13e0323\": container with ID starting with 8a3cd0218ac6dfbb2e1f3dfffe49531c5fc8290e5b3a3e592f94d331f13e0323 not found: ID does not exist" containerID="8a3cd0218ac6dfbb2e1f3dfffe49531c5fc8290e5b3a3e592f94d331f13e0323" Oct 01 15:06:49 crc kubenswrapper[4869]: I1001 15:06:49.735932 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8a3cd0218ac6dfbb2e1f3dfffe49531c5fc8290e5b3a3e592f94d331f13e0323"} err="failed to get container status \"8a3cd0218ac6dfbb2e1f3dfffe49531c5fc8290e5b3a3e592f94d331f13e0323\": rpc error: code = NotFound desc = could not find container \"8a3cd0218ac6dfbb2e1f3dfffe49531c5fc8290e5b3a3e592f94d331f13e0323\": container with ID starting with 8a3cd0218ac6dfbb2e1f3dfffe49531c5fc8290e5b3a3e592f94d331f13e0323 not found: ID does not exist" Oct 01 15:06:49 crc kubenswrapper[4869]: I1001 15:06:49.735960 4869 scope.go:117] "RemoveContainer" containerID="547c7b7716e0a4dd9a9790d183ff1e84e1989a2c0e1aa029ac1339a78be818d2" Oct 01 15:06:49 crc 
kubenswrapper[4869]: E1001 15:06:49.736381 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"547c7b7716e0a4dd9a9790d183ff1e84e1989a2c0e1aa029ac1339a78be818d2\": container with ID starting with 547c7b7716e0a4dd9a9790d183ff1e84e1989a2c0e1aa029ac1339a78be818d2 not found: ID does not exist" containerID="547c7b7716e0a4dd9a9790d183ff1e84e1989a2c0e1aa029ac1339a78be818d2" Oct 01 15:06:49 crc kubenswrapper[4869]: I1001 15:06:49.736401 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"547c7b7716e0a4dd9a9790d183ff1e84e1989a2c0e1aa029ac1339a78be818d2"} err="failed to get container status \"547c7b7716e0a4dd9a9790d183ff1e84e1989a2c0e1aa029ac1339a78be818d2\": rpc error: code = NotFound desc = could not find container \"547c7b7716e0a4dd9a9790d183ff1e84e1989a2c0e1aa029ac1339a78be818d2\": container with ID starting with 547c7b7716e0a4dd9a9790d183ff1e84e1989a2c0e1aa029ac1339a78be818d2 not found: ID does not exist" Oct 01 15:06:50 crc kubenswrapper[4869]: I1001 15:06:50.014517 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-mv2bb"] Oct 01 15:06:50 crc kubenswrapper[4869]: I1001 15:06:50.017216 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-mv2bb"] Oct 01 15:06:51 crc kubenswrapper[4869]: I1001 15:06:51.586739 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c1e7add4-dd2c-47c1-afee-e8dc937be3a8" path="/var/lib/kubelet/pods/c1e7add4-dd2c-47c1-afee-e8dc937be3a8/volumes" Oct 01 15:07:13 crc kubenswrapper[4869]: I1001 15:07:13.354835 4869 patch_prober.go:28] interesting pod/machine-config-daemon-c86m8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 15:07:13 crc kubenswrapper[4869]: I1001 15:07:13.355950 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.019214 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-8mczm" podUID="6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a" containerName="oauth-openshift" containerID="cri-o://afb576a00a14d022036fb4e7d4e78b0b6487da741f958ca836ccc7510556a41b" gracePeriod=15 Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.508431 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-8mczm" Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.557517 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-657494565c-sbplh"] Oct 01 15:07:14 crc kubenswrapper[4869]: E1001 15:07:14.557791 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08693370-8f83-4bb1-8e2f-83a2ece2c9dd" containerName="registry-server" Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.557810 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="08693370-8f83-4bb1-8e2f-83a2ece2c9dd" containerName="registry-server" Oct 01 15:07:14 crc kubenswrapper[4869]: E1001 15:07:14.557821 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08693370-8f83-4bb1-8e2f-83a2ece2c9dd" containerName="extract-content" Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.557828 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="08693370-8f83-4bb1-8e2f-83a2ece2c9dd" containerName="extract-content" Oct 01 15:07:14 crc kubenswrapper[4869]: E1001 15:07:14.557840 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a0c02ede-8eef-4d4e-a277-bcda77228c29" containerName="extract-utilities" Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.557848 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="a0c02ede-8eef-4d4e-a277-bcda77228c29" containerName="extract-utilities" Oct 01 15:07:14 crc kubenswrapper[4869]: E1001 15:07:14.557856 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="61d7fda4-0848-4beb-b7ad-7c361fe37595" containerName="extract-utilities" Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.557862 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="61d7fda4-0848-4beb-b7ad-7c361fe37595" containerName="extract-utilities" Oct 01 15:07:14 crc kubenswrapper[4869]: E1001 15:07:14.557870 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c1e7add4-dd2c-47c1-afee-e8dc937be3a8" containerName="extract-utilities" Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.557877 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="c1e7add4-dd2c-47c1-afee-e8dc937be3a8" containerName="extract-utilities" Oct 01 15:07:14 crc kubenswrapper[4869]: E1001 15:07:14.557885 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a" containerName="oauth-openshift" Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.557892 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a" containerName="oauth-openshift" Oct 01 15:07:14 crc kubenswrapper[4869]: E1001 15:07:14.557899 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c1e7add4-dd2c-47c1-afee-e8dc937be3a8" containerName="extract-content" Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.557906 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="c1e7add4-dd2c-47c1-afee-e8dc937be3a8" containerName="extract-content" Oct 01 15:07:14 crc kubenswrapper[4869]: E1001 15:07:14.557918 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="61d7fda4-0848-4beb-b7ad-7c361fe37595" containerName="registry-server" Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.557924 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="61d7fda4-0848-4beb-b7ad-7c361fe37595" containerName="registry-server" Oct 01 15:07:14 crc kubenswrapper[4869]: E1001 15:07:14.557932 4869 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="c1e7add4-dd2c-47c1-afee-e8dc937be3a8" containerName="registry-server" Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.557938 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="c1e7add4-dd2c-47c1-afee-e8dc937be3a8" containerName="registry-server" Oct 01 15:07:14 crc kubenswrapper[4869]: E1001 15:07:14.557947 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="61d7fda4-0848-4beb-b7ad-7c361fe37595" containerName="extract-content" Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.557954 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="61d7fda4-0848-4beb-b7ad-7c361fe37595" containerName="extract-content" Oct 01 15:07:14 crc kubenswrapper[4869]: E1001 15:07:14.557964 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a0c02ede-8eef-4d4e-a277-bcda77228c29" containerName="extract-content" Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.557973 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="a0c02ede-8eef-4d4e-a277-bcda77228c29" containerName="extract-content" Oct 01 15:07:14 crc kubenswrapper[4869]: E1001 15:07:14.557983 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4c1dcd57-cee2-45ab-9a92-8cdd8b864f98" containerName="collect-profiles" Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.557990 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="4c1dcd57-cee2-45ab-9a92-8cdd8b864f98" containerName="collect-profiles" Oct 01 15:07:14 crc kubenswrapper[4869]: E1001 15:07:14.558000 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a0c02ede-8eef-4d4e-a277-bcda77228c29" containerName="registry-server" Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.558007 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="a0c02ede-8eef-4d4e-a277-bcda77228c29" containerName="registry-server" Oct 01 15:07:14 crc kubenswrapper[4869]: E1001 15:07:14.558021 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08693370-8f83-4bb1-8e2f-83a2ece2c9dd" containerName="extract-utilities" Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.558028 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="08693370-8f83-4bb1-8e2f-83a2ece2c9dd" containerName="extract-utilities" Oct 01 15:07:14 crc kubenswrapper[4869]: E1001 15:07:14.558041 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03706444-4f26-489d-a0af-457d9bbaec6c" containerName="pruner" Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.558048 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="03706444-4f26-489d-a0af-457d9bbaec6c" containerName="pruner" Oct 01 15:07:14 crc kubenswrapper[4869]: E1001 15:07:14.558056 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9ff567e5-6f6b-4b25-a8a0-3aa792f4291d" containerName="kube-multus-additional-cni-plugins" Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.558064 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="9ff567e5-6f6b-4b25-a8a0-3aa792f4291d" containerName="kube-multus-additional-cni-plugins" Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.558743 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="c1e7add4-dd2c-47c1-afee-e8dc937be3a8" containerName="registry-server" Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.558756 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a" containerName="oauth-openshift" Oct 01 15:07:14 crc 
kubenswrapper[4869]: I1001 15:07:14.558768 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="4c1dcd57-cee2-45ab-9a92-8cdd8b864f98" containerName="collect-profiles" Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.558778 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="9ff567e5-6f6b-4b25-a8a0-3aa792f4291d" containerName="kube-multus-additional-cni-plugins" Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.558786 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="03706444-4f26-489d-a0af-457d9bbaec6c" containerName="pruner" Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.558833 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="61d7fda4-0848-4beb-b7ad-7c361fe37595" containerName="registry-server" Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.558843 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="a0c02ede-8eef-4d4e-a277-bcda77228c29" containerName="registry-server" Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.558851 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="08693370-8f83-4bb1-8e2f-83a2ece2c9dd" containerName="registry-server" Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.559308 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-657494565c-sbplh" Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.567215 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-657494565c-sbplh"] Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.691015 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a-v4-0-config-system-ocp-branding-template\") pod \"6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a\" (UID: \"6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a\") " Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.691088 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a-v4-0-config-system-router-certs\") pod \"6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a\" (UID: \"6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a\") " Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.691119 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a-v4-0-config-system-serving-cert\") pod \"6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a\" (UID: \"6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a\") " Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.691150 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a-audit-dir\") pod \"6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a\" (UID: \"6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a\") " Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.691174 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a-v4-0-config-user-idp-0-file-data\") pod \"6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a\" (UID: \"6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a\") " Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 
15:07:14.691201 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a-v4-0-config-system-session\") pod \"6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a\" (UID: \"6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a\") " Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.691234 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a-v4-0-config-user-template-provider-selection\") pod \"6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a\" (UID: \"6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a\") " Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.691289 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a-v4-0-config-system-trusted-ca-bundle\") pod \"6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a\" (UID: \"6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a\") " Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.691324 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a-v4-0-config-user-template-login\") pod \"6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a\" (UID: \"6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a\") " Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.691354 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a-v4-0-config-system-service-ca\") pod \"6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a\" (UID: \"6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a\") " Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.691374 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jn46p\" (UniqueName: \"kubernetes.io/projected/6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a-kube-api-access-jn46p\") pod \"6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a\" (UID: \"6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a\") " Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.691397 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a-v4-0-config-system-cliconfig\") pod \"6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a\" (UID: \"6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a\") " Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.691424 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a-v4-0-config-user-template-error\") pod \"6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a\" (UID: \"6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a\") " Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.691446 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a-audit-policies\") pod \"6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a\" (UID: \"6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a\") " Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.691594 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/a380beb5-c27d-4b68-8e4b-e863a4771a66-audit-dir\") pod \"oauth-openshift-657494565c-sbplh\" (UID: \"a380beb5-c27d-4b68-8e4b-e863a4771a66\") " pod="openshift-authentication/oauth-openshift-657494565c-sbplh" Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.691626 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/a380beb5-c27d-4b68-8e4b-e863a4771a66-v4-0-config-system-session\") pod \"oauth-openshift-657494565c-sbplh\" (UID: \"a380beb5-c27d-4b68-8e4b-e863a4771a66\") " pod="openshift-authentication/oauth-openshift-657494565c-sbplh" Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.691654 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/a380beb5-c27d-4b68-8e4b-e863a4771a66-v4-0-config-system-cliconfig\") pod \"oauth-openshift-657494565c-sbplh\" (UID: \"a380beb5-c27d-4b68-8e4b-e863a4771a66\") " pod="openshift-authentication/oauth-openshift-657494565c-sbplh" Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.691679 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/a380beb5-c27d-4b68-8e4b-e863a4771a66-v4-0-config-system-serving-cert\") pod \"oauth-openshift-657494565c-sbplh\" (UID: \"a380beb5-c27d-4b68-8e4b-e863a4771a66\") " pod="openshift-authentication/oauth-openshift-657494565c-sbplh" Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.691698 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/a380beb5-c27d-4b68-8e4b-e863a4771a66-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-657494565c-sbplh\" (UID: \"a380beb5-c27d-4b68-8e4b-e863a4771a66\") " pod="openshift-authentication/oauth-openshift-657494565c-sbplh" Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.691721 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/a380beb5-c27d-4b68-8e4b-e863a4771a66-audit-policies\") pod \"oauth-openshift-657494565c-sbplh\" (UID: \"a380beb5-c27d-4b68-8e4b-e863a4771a66\") " pod="openshift-authentication/oauth-openshift-657494565c-sbplh" Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.691741 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8zjgm\" (UniqueName: \"kubernetes.io/projected/a380beb5-c27d-4b68-8e4b-e863a4771a66-kube-api-access-8zjgm\") pod \"oauth-openshift-657494565c-sbplh\" (UID: \"a380beb5-c27d-4b68-8e4b-e863a4771a66\") " pod="openshift-authentication/oauth-openshift-657494565c-sbplh" Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.691772 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a380beb5-c27d-4b68-8e4b-e863a4771a66-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-657494565c-sbplh\" (UID: \"a380beb5-c27d-4b68-8e4b-e863a4771a66\") " pod="openshift-authentication/oauth-openshift-657494565c-sbplh" Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.691795 4869 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/a380beb5-c27d-4b68-8e4b-e863a4771a66-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-657494565c-sbplh\" (UID: \"a380beb5-c27d-4b68-8e4b-e863a4771a66\") " pod="openshift-authentication/oauth-openshift-657494565c-sbplh" Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.691822 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a" (UID: "6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.691863 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/a380beb5-c27d-4b68-8e4b-e863a4771a66-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-657494565c-sbplh\" (UID: \"a380beb5-c27d-4b68-8e4b-e863a4771a66\") " pod="openshift-authentication/oauth-openshift-657494565c-sbplh" Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.692042 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/a380beb5-c27d-4b68-8e4b-e863a4771a66-v4-0-config-user-template-login\") pod \"oauth-openshift-657494565c-sbplh\" (UID: \"a380beb5-c27d-4b68-8e4b-e863a4771a66\") " pod="openshift-authentication/oauth-openshift-657494565c-sbplh" Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.692078 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/a380beb5-c27d-4b68-8e4b-e863a4771a66-v4-0-config-system-service-ca\") pod \"oauth-openshift-657494565c-sbplh\" (UID: \"a380beb5-c27d-4b68-8e4b-e863a4771a66\") " pod="openshift-authentication/oauth-openshift-657494565c-sbplh" Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.692144 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/a380beb5-c27d-4b68-8e4b-e863a4771a66-v4-0-config-system-router-certs\") pod \"oauth-openshift-657494565c-sbplh\" (UID: \"a380beb5-c27d-4b68-8e4b-e863a4771a66\") " pod="openshift-authentication/oauth-openshift-657494565c-sbplh" Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.692187 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/a380beb5-c27d-4b68-8e4b-e863a4771a66-v4-0-config-user-template-error\") pod \"oauth-openshift-657494565c-sbplh\" (UID: \"a380beb5-c27d-4b68-8e4b-e863a4771a66\") " pod="openshift-authentication/oauth-openshift-657494565c-sbplh" Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.692287 4869 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a-audit-dir\") on node \"crc\" DevicePath \"\"" Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.692564 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/configmap/6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a" (UID: "6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.692628 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a" (UID: "6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.692846 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a" (UID: "6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.694800 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a" (UID: "6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.698222 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a" (UID: "6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.700793 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a" (UID: "6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.701124 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a" (UID: "6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a"). InnerVolumeSpecName "v4-0-config-user-template-login". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.701321 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a" (UID: "6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.701581 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a" (UID: "6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.701896 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a" (UID: "6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.701916 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a" (UID: "6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.702515 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a" (UID: "6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.708962 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a-kube-api-access-jn46p" (OuterVolumeSpecName: "kube-api-access-jn46p") pod "6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a" (UID: "6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a"). InnerVolumeSpecName "kube-api-access-jn46p". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.793522 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/a380beb5-c27d-4b68-8e4b-e863a4771a66-v4-0-config-system-router-certs\") pod \"oauth-openshift-657494565c-sbplh\" (UID: \"a380beb5-c27d-4b68-8e4b-e863a4771a66\") " pod="openshift-authentication/oauth-openshift-657494565c-sbplh" Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.793584 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/a380beb5-c27d-4b68-8e4b-e863a4771a66-v4-0-config-user-template-error\") pod \"oauth-openshift-657494565c-sbplh\" (UID: \"a380beb5-c27d-4b68-8e4b-e863a4771a66\") " pod="openshift-authentication/oauth-openshift-657494565c-sbplh" Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.793638 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/a380beb5-c27d-4b68-8e4b-e863a4771a66-audit-dir\") pod \"oauth-openshift-657494565c-sbplh\" (UID: \"a380beb5-c27d-4b68-8e4b-e863a4771a66\") " pod="openshift-authentication/oauth-openshift-657494565c-sbplh" Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.793662 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/a380beb5-c27d-4b68-8e4b-e863a4771a66-v4-0-config-system-session\") pod \"oauth-openshift-657494565c-sbplh\" (UID: \"a380beb5-c27d-4b68-8e4b-e863a4771a66\") " pod="openshift-authentication/oauth-openshift-657494565c-sbplh" Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.793688 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/a380beb5-c27d-4b68-8e4b-e863a4771a66-v4-0-config-system-cliconfig\") pod \"oauth-openshift-657494565c-sbplh\" (UID: \"a380beb5-c27d-4b68-8e4b-e863a4771a66\") " pod="openshift-authentication/oauth-openshift-657494565c-sbplh" Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.793712 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/a380beb5-c27d-4b68-8e4b-e863a4771a66-v4-0-config-system-serving-cert\") pod \"oauth-openshift-657494565c-sbplh\" (UID: \"a380beb5-c27d-4b68-8e4b-e863a4771a66\") " pod="openshift-authentication/oauth-openshift-657494565c-sbplh" Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.793742 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/a380beb5-c27d-4b68-8e4b-e863a4771a66-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-657494565c-sbplh\" (UID: \"a380beb5-c27d-4b68-8e4b-e863a4771a66\") " pod="openshift-authentication/oauth-openshift-657494565c-sbplh" Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.793815 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/a380beb5-c27d-4b68-8e4b-e863a4771a66-audit-policies\") pod \"oauth-openshift-657494565c-sbplh\" (UID: \"a380beb5-c27d-4b68-8e4b-e863a4771a66\") " pod="openshift-authentication/oauth-openshift-657494565c-sbplh" Oct 01 15:07:14 crc 
kubenswrapper[4869]: I1001 15:07:14.793858 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8zjgm\" (UniqueName: \"kubernetes.io/projected/a380beb5-c27d-4b68-8e4b-e863a4771a66-kube-api-access-8zjgm\") pod \"oauth-openshift-657494565c-sbplh\" (UID: \"a380beb5-c27d-4b68-8e4b-e863a4771a66\") " pod="openshift-authentication/oauth-openshift-657494565c-sbplh" Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.793909 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a380beb5-c27d-4b68-8e4b-e863a4771a66-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-657494565c-sbplh\" (UID: \"a380beb5-c27d-4b68-8e4b-e863a4771a66\") " pod="openshift-authentication/oauth-openshift-657494565c-sbplh" Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.793950 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/a380beb5-c27d-4b68-8e4b-e863a4771a66-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-657494565c-sbplh\" (UID: \"a380beb5-c27d-4b68-8e4b-e863a4771a66\") " pod="openshift-authentication/oauth-openshift-657494565c-sbplh" Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.793981 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/a380beb5-c27d-4b68-8e4b-e863a4771a66-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-657494565c-sbplh\" (UID: \"a380beb5-c27d-4b68-8e4b-e863a4771a66\") " pod="openshift-authentication/oauth-openshift-657494565c-sbplh" Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.794035 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/a380beb5-c27d-4b68-8e4b-e863a4771a66-v4-0-config-user-template-login\") pod \"oauth-openshift-657494565c-sbplh\" (UID: \"a380beb5-c27d-4b68-8e4b-e863a4771a66\") " pod="openshift-authentication/oauth-openshift-657494565c-sbplh" Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.794068 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/a380beb5-c27d-4b68-8e4b-e863a4771a66-v4-0-config-system-service-ca\") pod \"oauth-openshift-657494565c-sbplh\" (UID: \"a380beb5-c27d-4b68-8e4b-e863a4771a66\") " pod="openshift-authentication/oauth-openshift-657494565c-sbplh" Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.794138 4869 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.794159 4869 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.794179 4869 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: 
\"kubernetes.io/secret/6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.794198 4869 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.794217 4869 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.794236 4869 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.794281 4869 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.794302 4869 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.794322 4869 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.794340 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jn46p\" (UniqueName: \"kubernetes.io/projected/6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a-kube-api-access-jn46p\") on node \"crc\" DevicePath \"\"" Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.794358 4869 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.794378 4869 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.794396 4869 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a-audit-policies\") on node \"crc\" DevicePath \"\"" Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.795117 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/a380beb5-c27d-4b68-8e4b-e863a4771a66-audit-dir\") pod \"oauth-openshift-657494565c-sbplh\" (UID: \"a380beb5-c27d-4b68-8e4b-e863a4771a66\") " pod="openshift-authentication/oauth-openshift-657494565c-sbplh" Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.798325 
4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/a380beb5-c27d-4b68-8e4b-e863a4771a66-v4-0-config-system-cliconfig\") pod \"oauth-openshift-657494565c-sbplh\" (UID: \"a380beb5-c27d-4b68-8e4b-e863a4771a66\") " pod="openshift-authentication/oauth-openshift-657494565c-sbplh" Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.798379 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/a380beb5-c27d-4b68-8e4b-e863a4771a66-v4-0-config-system-service-ca\") pod \"oauth-openshift-657494565c-sbplh\" (UID: \"a380beb5-c27d-4b68-8e4b-e863a4771a66\") " pod="openshift-authentication/oauth-openshift-657494565c-sbplh" Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.798698 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/a380beb5-c27d-4b68-8e4b-e863a4771a66-audit-policies\") pod \"oauth-openshift-657494565c-sbplh\" (UID: \"a380beb5-c27d-4b68-8e4b-e863a4771a66\") " pod="openshift-authentication/oauth-openshift-657494565c-sbplh" Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.799974 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a380beb5-c27d-4b68-8e4b-e863a4771a66-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-657494565c-sbplh\" (UID: \"a380beb5-c27d-4b68-8e4b-e863a4771a66\") " pod="openshift-authentication/oauth-openshift-657494565c-sbplh" Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.801939 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/a380beb5-c27d-4b68-8e4b-e863a4771a66-v4-0-config-user-template-error\") pod \"oauth-openshift-657494565c-sbplh\" (UID: \"a380beb5-c27d-4b68-8e4b-e863a4771a66\") " pod="openshift-authentication/oauth-openshift-657494565c-sbplh" Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.802911 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/a380beb5-c27d-4b68-8e4b-e863a4771a66-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-657494565c-sbplh\" (UID: \"a380beb5-c27d-4b68-8e4b-e863a4771a66\") " pod="openshift-authentication/oauth-openshift-657494565c-sbplh" Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.803330 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/a380beb5-c27d-4b68-8e4b-e863a4771a66-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-657494565c-sbplh\" (UID: \"a380beb5-c27d-4b68-8e4b-e863a4771a66\") " pod="openshift-authentication/oauth-openshift-657494565c-sbplh" Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.803340 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/a380beb5-c27d-4b68-8e4b-e863a4771a66-v4-0-config-user-template-login\") pod \"oauth-openshift-657494565c-sbplh\" (UID: \"a380beb5-c27d-4b68-8e4b-e863a4771a66\") " pod="openshift-authentication/oauth-openshift-657494565c-sbplh" Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.803681 4869 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/a380beb5-c27d-4b68-8e4b-e863a4771a66-v4-0-config-system-session\") pod \"oauth-openshift-657494565c-sbplh\" (UID: \"a380beb5-c27d-4b68-8e4b-e863a4771a66\") " pod="openshift-authentication/oauth-openshift-657494565c-sbplh" Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.804142 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/a380beb5-c27d-4b68-8e4b-e863a4771a66-v4-0-config-system-router-certs\") pod \"oauth-openshift-657494565c-sbplh\" (UID: \"a380beb5-c27d-4b68-8e4b-e863a4771a66\") " pod="openshift-authentication/oauth-openshift-657494565c-sbplh" Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.804443 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/a380beb5-c27d-4b68-8e4b-e863a4771a66-v4-0-config-system-serving-cert\") pod \"oauth-openshift-657494565c-sbplh\" (UID: \"a380beb5-c27d-4b68-8e4b-e863a4771a66\") " pod="openshift-authentication/oauth-openshift-657494565c-sbplh" Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.806738 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/a380beb5-c27d-4b68-8e4b-e863a4771a66-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-657494565c-sbplh\" (UID: \"a380beb5-c27d-4b68-8e4b-e863a4771a66\") " pod="openshift-authentication/oauth-openshift-657494565c-sbplh" Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.819520 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8zjgm\" (UniqueName: \"kubernetes.io/projected/a380beb5-c27d-4b68-8e4b-e863a4771a66-kube-api-access-8zjgm\") pod \"oauth-openshift-657494565c-sbplh\" (UID: \"a380beb5-c27d-4b68-8e4b-e863a4771a66\") " pod="openshift-authentication/oauth-openshift-657494565c-sbplh" Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.851982 4869 generic.go:334] "Generic (PLEG): container finished" podID="6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a" containerID="afb576a00a14d022036fb4e7d4e78b0b6487da741f958ca836ccc7510556a41b" exitCode=0 Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.852028 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-8mczm" event={"ID":"6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a","Type":"ContainerDied","Data":"afb576a00a14d022036fb4e7d4e78b0b6487da741f958ca836ccc7510556a41b"} Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.852072 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-8mczm" Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.852100 4869 scope.go:117] "RemoveContainer" containerID="afb576a00a14d022036fb4e7d4e78b0b6487da741f958ca836ccc7510556a41b" Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.852083 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-8mczm" event={"ID":"6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a","Type":"ContainerDied","Data":"c2ed8b6f10028dabfe097ee76619aa500fc8ff4342fb9d2ba5a40f30e2a92b21"} Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.873452 4869 scope.go:117] "RemoveContainer" containerID="afb576a00a14d022036fb4e7d4e78b0b6487da741f958ca836ccc7510556a41b" Oct 01 15:07:14 crc kubenswrapper[4869]: E1001 15:07:14.873951 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"afb576a00a14d022036fb4e7d4e78b0b6487da741f958ca836ccc7510556a41b\": container with ID starting with afb576a00a14d022036fb4e7d4e78b0b6487da741f958ca836ccc7510556a41b not found: ID does not exist" containerID="afb576a00a14d022036fb4e7d4e78b0b6487da741f958ca836ccc7510556a41b" Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.873995 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"afb576a00a14d022036fb4e7d4e78b0b6487da741f958ca836ccc7510556a41b"} err="failed to get container status \"afb576a00a14d022036fb4e7d4e78b0b6487da741f958ca836ccc7510556a41b\": rpc error: code = NotFound desc = could not find container \"afb576a00a14d022036fb4e7d4e78b0b6487da741f958ca836ccc7510556a41b\": container with ID starting with afb576a00a14d022036fb4e7d4e78b0b6487da741f958ca836ccc7510556a41b not found: ID does not exist" Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.889440 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-657494565c-sbplh" Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.892943 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-8mczm"] Oct 01 15:07:14 crc kubenswrapper[4869]: I1001 15:07:14.895934 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-8mczm"] Oct 01 15:07:15 crc kubenswrapper[4869]: I1001 15:07:15.144368 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-657494565c-sbplh"] Oct 01 15:07:15 crc kubenswrapper[4869]: I1001 15:07:15.597903 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a" path="/var/lib/kubelet/pods/6ee4ec07-4fc1-4d3e-9b21-e111b2950c5a/volumes" Oct 01 15:07:15 crc kubenswrapper[4869]: I1001 15:07:15.861611 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-657494565c-sbplh" event={"ID":"a380beb5-c27d-4b68-8e4b-e863a4771a66","Type":"ContainerStarted","Data":"461993a4321b1c7e73e9552da10128abc265570dda96018d7e26854e12283fb2"} Oct 01 15:07:15 crc kubenswrapper[4869]: I1001 15:07:15.861656 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-657494565c-sbplh" event={"ID":"a380beb5-c27d-4b68-8e4b-e863a4771a66","Type":"ContainerStarted","Data":"8c96bd625ec03b09496c22d3d343e6a89984e0114e4dadce21c1e9618dad6a3d"} Oct 01 15:07:15 crc kubenswrapper[4869]: I1001 15:07:15.861951 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-657494565c-sbplh" Oct 01 15:07:15 crc kubenswrapper[4869]: I1001 15:07:15.886630 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-657494565c-sbplh" podStartSLOduration=26.886604537 podStartE2EDuration="26.886604537s" podCreationTimestamp="2025-10-01 15:06:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:07:15.883581317 +0000 UTC m=+145.030424453" watchObservedRunningTime="2025-10-01 15:07:15.886604537 +0000 UTC m=+145.033447683" Oct 01 15:07:16 crc kubenswrapper[4869]: I1001 15:07:16.339403 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-657494565c-sbplh" Oct 01 15:07:27 crc kubenswrapper[4869]: I1001 15:07:27.793873 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-262k2"] Oct 01 15:07:27 crc kubenswrapper[4869]: I1001 15:07:27.794588 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-262k2" podUID="f6f8609b-7f80-4b8f-a371-bb1d10396a15" containerName="registry-server" containerID="cri-o://eeab3bcbb7bf7e2eca7c12d433f5c81e2680737975733fdcd5ec24c251ff68bd" gracePeriod=30 Oct 01 15:07:27 crc kubenswrapper[4869]: I1001 15:07:27.805600 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-qf2zt"] Oct 01 15:07:27 crc kubenswrapper[4869]: I1001 15:07:27.805898 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-qf2zt" podUID="87c849d7-d613-446d-9f2f-bdcf6da7e4e6" containerName="registry-server" 
containerID="cri-o://f6a51d050f74f3ce6a16bba6da88dba863231f0f8ac6c9bbea228ebf6335f69e" gracePeriod=30 Oct 01 15:07:27 crc kubenswrapper[4869]: I1001 15:07:27.816736 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-t5kll"] Oct 01 15:07:27 crc kubenswrapper[4869]: I1001 15:07:27.817385 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-t5kll" podUID="2d195bae-5172-4387-869b-086f215963ff" containerName="marketplace-operator" containerID="cri-o://aa96b28348f96b32cfacf6e83e427c3bff779f8a42f91f2b49d611ddf30525f5" gracePeriod=30 Oct 01 15:07:27 crc kubenswrapper[4869]: I1001 15:07:27.832511 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-bcf4v"] Oct 01 15:07:27 crc kubenswrapper[4869]: I1001 15:07:27.832780 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-bcf4v" podUID="1b0f8e72-9451-4c28-8e39-9e8c94096b80" containerName="registry-server" containerID="cri-o://07720c3cd6704525796dcb655526f3093a28bf2533a62fd574ad80e771ef0ce5" gracePeriod=30 Oct 01 15:07:27 crc kubenswrapper[4869]: I1001 15:07:27.836123 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-fqbbl"] Oct 01 15:07:27 crc kubenswrapper[4869]: I1001 15:07:27.836838 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-fqbbl" Oct 01 15:07:27 crc kubenswrapper[4869]: I1001 15:07:27.848629 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-xvckx"] Oct 01 15:07:27 crc kubenswrapper[4869]: I1001 15:07:27.848937 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-xvckx" podUID="3d37704b-7106-4dcc-b91a-1de81e03d6a9" containerName="registry-server" containerID="cri-o://cfdfde00cc688d2f87dc9212c3c234cbe19f52fb5bc46b6aaf3b58cf03db6930" gracePeriod=30 Oct 01 15:07:27 crc kubenswrapper[4869]: I1001 15:07:27.851731 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-fqbbl"] Oct 01 15:07:27 crc kubenswrapper[4869]: I1001 15:07:27.928328 4869 generic.go:334] "Generic (PLEG): container finished" podID="87c849d7-d613-446d-9f2f-bdcf6da7e4e6" containerID="f6a51d050f74f3ce6a16bba6da88dba863231f0f8ac6c9bbea228ebf6335f69e" exitCode=0 Oct 01 15:07:27 crc kubenswrapper[4869]: I1001 15:07:27.928379 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qf2zt" event={"ID":"87c849d7-d613-446d-9f2f-bdcf6da7e4e6","Type":"ContainerDied","Data":"f6a51d050f74f3ce6a16bba6da88dba863231f0f8ac6c9bbea228ebf6335f69e"} Oct 01 15:07:27 crc kubenswrapper[4869]: I1001 15:07:27.930057 4869 generic.go:334] "Generic (PLEG): container finished" podID="f6f8609b-7f80-4b8f-a371-bb1d10396a15" containerID="eeab3bcbb7bf7e2eca7c12d433f5c81e2680737975733fdcd5ec24c251ff68bd" exitCode=0 Oct 01 15:07:27 crc kubenswrapper[4869]: I1001 15:07:27.930081 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-262k2" event={"ID":"f6f8609b-7f80-4b8f-a371-bb1d10396a15","Type":"ContainerDied","Data":"eeab3bcbb7bf7e2eca7c12d433f5c81e2680737975733fdcd5ec24c251ff68bd"} Oct 01 15:07:27 crc kubenswrapper[4869]: I1001 15:07:27.969296 4869 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/0674ba6e-99f1-494a-ab15-a852605f2d52-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-fqbbl\" (UID: \"0674ba6e-99f1-494a-ab15-a852605f2d52\") " pod="openshift-marketplace/marketplace-operator-79b997595-fqbbl" Oct 01 15:07:27 crc kubenswrapper[4869]: I1001 15:07:27.969729 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m4nhp\" (UniqueName: \"kubernetes.io/projected/0674ba6e-99f1-494a-ab15-a852605f2d52-kube-api-access-m4nhp\") pod \"marketplace-operator-79b997595-fqbbl\" (UID: \"0674ba6e-99f1-494a-ab15-a852605f2d52\") " pod="openshift-marketplace/marketplace-operator-79b997595-fqbbl" Oct 01 15:07:27 crc kubenswrapper[4869]: I1001 15:07:27.969775 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/0674ba6e-99f1-494a-ab15-a852605f2d52-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-fqbbl\" (UID: \"0674ba6e-99f1-494a-ab15-a852605f2d52\") " pod="openshift-marketplace/marketplace-operator-79b997595-fqbbl" Oct 01 15:07:28 crc kubenswrapper[4869]: I1001 15:07:28.010174 4869 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-t5kll container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.21:8080/healthz\": dial tcp 10.217.0.21:8080: connect: connection refused" start-of-body= Oct 01 15:07:28 crc kubenswrapper[4869]: I1001 15:07:28.010232 4869 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-t5kll" podUID="2d195bae-5172-4387-869b-086f215963ff" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.21:8080/healthz\": dial tcp 10.217.0.21:8080: connect: connection refused" Oct 01 15:07:28 crc kubenswrapper[4869]: I1001 15:07:28.070400 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/0674ba6e-99f1-494a-ab15-a852605f2d52-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-fqbbl\" (UID: \"0674ba6e-99f1-494a-ab15-a852605f2d52\") " pod="openshift-marketplace/marketplace-operator-79b997595-fqbbl" Oct 01 15:07:28 crc kubenswrapper[4869]: I1001 15:07:28.070471 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m4nhp\" (UniqueName: \"kubernetes.io/projected/0674ba6e-99f1-494a-ab15-a852605f2d52-kube-api-access-m4nhp\") pod \"marketplace-operator-79b997595-fqbbl\" (UID: \"0674ba6e-99f1-494a-ab15-a852605f2d52\") " pod="openshift-marketplace/marketplace-operator-79b997595-fqbbl" Oct 01 15:07:28 crc kubenswrapper[4869]: I1001 15:07:28.070517 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/0674ba6e-99f1-494a-ab15-a852605f2d52-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-fqbbl\" (UID: \"0674ba6e-99f1-494a-ab15-a852605f2d52\") " pod="openshift-marketplace/marketplace-operator-79b997595-fqbbl" Oct 01 15:07:28 crc kubenswrapper[4869]: I1001 15:07:28.072397 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: 
\"kubernetes.io/configmap/0674ba6e-99f1-494a-ab15-a852605f2d52-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-fqbbl\" (UID: \"0674ba6e-99f1-494a-ab15-a852605f2d52\") " pod="openshift-marketplace/marketplace-operator-79b997595-fqbbl" Oct 01 15:07:28 crc kubenswrapper[4869]: I1001 15:07:28.077955 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/0674ba6e-99f1-494a-ab15-a852605f2d52-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-fqbbl\" (UID: \"0674ba6e-99f1-494a-ab15-a852605f2d52\") " pod="openshift-marketplace/marketplace-operator-79b997595-fqbbl" Oct 01 15:07:28 crc kubenswrapper[4869]: I1001 15:07:28.090564 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m4nhp\" (UniqueName: \"kubernetes.io/projected/0674ba6e-99f1-494a-ab15-a852605f2d52-kube-api-access-m4nhp\") pod \"marketplace-operator-79b997595-fqbbl\" (UID: \"0674ba6e-99f1-494a-ab15-a852605f2d52\") " pod="openshift-marketplace/marketplace-operator-79b997595-fqbbl" Oct 01 15:07:28 crc kubenswrapper[4869]: I1001 15:07:28.152773 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-fqbbl" Oct 01 15:07:28 crc kubenswrapper[4869]: I1001 15:07:28.201228 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-262k2" Oct 01 15:07:28 crc kubenswrapper[4869]: I1001 15:07:28.299278 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-qf2zt" Oct 01 15:07:28 crc kubenswrapper[4869]: I1001 15:07:28.313393 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-xvckx" Oct 01 15:07:28 crc kubenswrapper[4869]: I1001 15:07:28.361188 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-bcf4v" Oct 01 15:07:28 crc kubenswrapper[4869]: I1001 15:07:28.365176 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-t5kll" Oct 01 15:07:28 crc kubenswrapper[4869]: I1001 15:07:28.390756 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f6f8609b-7f80-4b8f-a371-bb1d10396a15-catalog-content\") pod \"f6f8609b-7f80-4b8f-a371-bb1d10396a15\" (UID: \"f6f8609b-7f80-4b8f-a371-bb1d10396a15\") " Oct 01 15:07:28 crc kubenswrapper[4869]: I1001 15:07:28.390800 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f6f8609b-7f80-4b8f-a371-bb1d10396a15-utilities\") pod \"f6f8609b-7f80-4b8f-a371-bb1d10396a15\" (UID: \"f6f8609b-7f80-4b8f-a371-bb1d10396a15\") " Oct 01 15:07:28 crc kubenswrapper[4869]: I1001 15:07:28.390947 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vbv92\" (UniqueName: \"kubernetes.io/projected/f6f8609b-7f80-4b8f-a371-bb1d10396a15-kube-api-access-vbv92\") pod \"f6f8609b-7f80-4b8f-a371-bb1d10396a15\" (UID: \"f6f8609b-7f80-4b8f-a371-bb1d10396a15\") " Oct 01 15:07:28 crc kubenswrapper[4869]: I1001 15:07:28.392114 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f877n\" (UniqueName: \"kubernetes.io/projected/1b0f8e72-9451-4c28-8e39-9e8c94096b80-kube-api-access-f877n\") pod \"1b0f8e72-9451-4c28-8e39-9e8c94096b80\" (UID: \"1b0f8e72-9451-4c28-8e39-9e8c94096b80\") " Oct 01 15:07:28 crc kubenswrapper[4869]: I1001 15:07:28.392163 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sk5qg\" (UniqueName: \"kubernetes.io/projected/2d195bae-5172-4387-869b-086f215963ff-kube-api-access-sk5qg\") pod \"2d195bae-5172-4387-869b-086f215963ff\" (UID: \"2d195bae-5172-4387-869b-086f215963ff\") " Oct 01 15:07:28 crc kubenswrapper[4869]: I1001 15:07:28.392571 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f6f8609b-7f80-4b8f-a371-bb1d10396a15-utilities" (OuterVolumeSpecName: "utilities") pod "f6f8609b-7f80-4b8f-a371-bb1d10396a15" (UID: "f6f8609b-7f80-4b8f-a371-bb1d10396a15"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 15:07:28 crc kubenswrapper[4869]: I1001 15:07:28.397326 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2d195bae-5172-4387-869b-086f215963ff-kube-api-access-sk5qg" (OuterVolumeSpecName: "kube-api-access-sk5qg") pod "2d195bae-5172-4387-869b-086f215963ff" (UID: "2d195bae-5172-4387-869b-086f215963ff"). InnerVolumeSpecName "kube-api-access-sk5qg". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:07:28 crc kubenswrapper[4869]: I1001 15:07:28.398421 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f6f8609b-7f80-4b8f-a371-bb1d10396a15-kube-api-access-vbv92" (OuterVolumeSpecName: "kube-api-access-vbv92") pod "f6f8609b-7f80-4b8f-a371-bb1d10396a15" (UID: "f6f8609b-7f80-4b8f-a371-bb1d10396a15"). InnerVolumeSpecName "kube-api-access-vbv92". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:07:28 crc kubenswrapper[4869]: I1001 15:07:28.398690 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1b0f8e72-9451-4c28-8e39-9e8c94096b80-kube-api-access-f877n" (OuterVolumeSpecName: "kube-api-access-f877n") pod "1b0f8e72-9451-4c28-8e39-9e8c94096b80" (UID: "1b0f8e72-9451-4c28-8e39-9e8c94096b80"). InnerVolumeSpecName "kube-api-access-f877n". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:07:28 crc kubenswrapper[4869]: I1001 15:07:28.442700 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f6f8609b-7f80-4b8f-a371-bb1d10396a15-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f6f8609b-7f80-4b8f-a371-bb1d10396a15" (UID: "f6f8609b-7f80-4b8f-a371-bb1d10396a15"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 15:07:28 crc kubenswrapper[4869]: I1001 15:07:28.493163 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h2sfg\" (UniqueName: \"kubernetes.io/projected/3d37704b-7106-4dcc-b91a-1de81e03d6a9-kube-api-access-h2sfg\") pod \"3d37704b-7106-4dcc-b91a-1de81e03d6a9\" (UID: \"3d37704b-7106-4dcc-b91a-1de81e03d6a9\") " Oct 01 15:07:28 crc kubenswrapper[4869]: I1001 15:07:28.493205 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/87c849d7-d613-446d-9f2f-bdcf6da7e4e6-catalog-content\") pod \"87c849d7-d613-446d-9f2f-bdcf6da7e4e6\" (UID: \"87c849d7-d613-446d-9f2f-bdcf6da7e4e6\") " Oct 01 15:07:28 crc kubenswrapper[4869]: I1001 15:07:28.493270 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gb7pd\" (UniqueName: \"kubernetes.io/projected/87c849d7-d613-446d-9f2f-bdcf6da7e4e6-kube-api-access-gb7pd\") pod \"87c849d7-d613-446d-9f2f-bdcf6da7e4e6\" (UID: \"87c849d7-d613-446d-9f2f-bdcf6da7e4e6\") " Oct 01 15:07:28 crc kubenswrapper[4869]: I1001 15:07:28.493351 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/2d195bae-5172-4387-869b-086f215963ff-marketplace-operator-metrics\") pod \"2d195bae-5172-4387-869b-086f215963ff\" (UID: \"2d195bae-5172-4387-869b-086f215963ff\") " Oct 01 15:07:28 crc kubenswrapper[4869]: I1001 15:07:28.493378 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3d37704b-7106-4dcc-b91a-1de81e03d6a9-catalog-content\") pod \"3d37704b-7106-4dcc-b91a-1de81e03d6a9\" (UID: \"3d37704b-7106-4dcc-b91a-1de81e03d6a9\") " Oct 01 15:07:28 crc kubenswrapper[4869]: I1001 15:07:28.493402 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1b0f8e72-9451-4c28-8e39-9e8c94096b80-catalog-content\") pod \"1b0f8e72-9451-4c28-8e39-9e8c94096b80\" (UID: \"1b0f8e72-9451-4c28-8e39-9e8c94096b80\") " Oct 01 15:07:28 crc kubenswrapper[4869]: I1001 15:07:28.493425 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1b0f8e72-9451-4c28-8e39-9e8c94096b80-utilities\") pod \"1b0f8e72-9451-4c28-8e39-9e8c94096b80\" (UID: \"1b0f8e72-9451-4c28-8e39-9e8c94096b80\") " Oct 01 15:07:28 crc kubenswrapper[4869]: I1001 15:07:28.493445 
4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2d195bae-5172-4387-869b-086f215963ff-marketplace-trusted-ca\") pod \"2d195bae-5172-4387-869b-086f215963ff\" (UID: \"2d195bae-5172-4387-869b-086f215963ff\") " Oct 01 15:07:28 crc kubenswrapper[4869]: I1001 15:07:28.493460 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3d37704b-7106-4dcc-b91a-1de81e03d6a9-utilities\") pod \"3d37704b-7106-4dcc-b91a-1de81e03d6a9\" (UID: \"3d37704b-7106-4dcc-b91a-1de81e03d6a9\") " Oct 01 15:07:28 crc kubenswrapper[4869]: I1001 15:07:28.493479 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/87c849d7-d613-446d-9f2f-bdcf6da7e4e6-utilities\") pod \"87c849d7-d613-446d-9f2f-bdcf6da7e4e6\" (UID: \"87c849d7-d613-446d-9f2f-bdcf6da7e4e6\") " Oct 01 15:07:28 crc kubenswrapper[4869]: I1001 15:07:28.493649 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f877n\" (UniqueName: \"kubernetes.io/projected/1b0f8e72-9451-4c28-8e39-9e8c94096b80-kube-api-access-f877n\") on node \"crc\" DevicePath \"\"" Oct 01 15:07:28 crc kubenswrapper[4869]: I1001 15:07:28.493661 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sk5qg\" (UniqueName: \"kubernetes.io/projected/2d195bae-5172-4387-869b-086f215963ff-kube-api-access-sk5qg\") on node \"crc\" DevicePath \"\"" Oct 01 15:07:28 crc kubenswrapper[4869]: I1001 15:07:28.493671 4869 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f6f8609b-7f80-4b8f-a371-bb1d10396a15-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 15:07:28 crc kubenswrapper[4869]: I1001 15:07:28.493681 4869 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f6f8609b-7f80-4b8f-a371-bb1d10396a15-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 15:07:28 crc kubenswrapper[4869]: I1001 15:07:28.493689 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vbv92\" (UniqueName: \"kubernetes.io/projected/f6f8609b-7f80-4b8f-a371-bb1d10396a15-kube-api-access-vbv92\") on node \"crc\" DevicePath \"\"" Oct 01 15:07:28 crc kubenswrapper[4869]: I1001 15:07:28.494675 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1b0f8e72-9451-4c28-8e39-9e8c94096b80-utilities" (OuterVolumeSpecName: "utilities") pod "1b0f8e72-9451-4c28-8e39-9e8c94096b80" (UID: "1b0f8e72-9451-4c28-8e39-9e8c94096b80"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 15:07:28 crc kubenswrapper[4869]: I1001 15:07:28.494745 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3d37704b-7106-4dcc-b91a-1de81e03d6a9-utilities" (OuterVolumeSpecName: "utilities") pod "3d37704b-7106-4dcc-b91a-1de81e03d6a9" (UID: "3d37704b-7106-4dcc-b91a-1de81e03d6a9"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 15:07:28 crc kubenswrapper[4869]: I1001 15:07:28.495458 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2d195bae-5172-4387-869b-086f215963ff-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "2d195bae-5172-4387-869b-086f215963ff" (UID: "2d195bae-5172-4387-869b-086f215963ff"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:07:28 crc kubenswrapper[4869]: I1001 15:07:28.496062 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/87c849d7-d613-446d-9f2f-bdcf6da7e4e6-utilities" (OuterVolumeSpecName: "utilities") pod "87c849d7-d613-446d-9f2f-bdcf6da7e4e6" (UID: "87c849d7-d613-446d-9f2f-bdcf6da7e4e6"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 15:07:28 crc kubenswrapper[4869]: I1001 15:07:28.497405 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3d37704b-7106-4dcc-b91a-1de81e03d6a9-kube-api-access-h2sfg" (OuterVolumeSpecName: "kube-api-access-h2sfg") pod "3d37704b-7106-4dcc-b91a-1de81e03d6a9" (UID: "3d37704b-7106-4dcc-b91a-1de81e03d6a9"). InnerVolumeSpecName "kube-api-access-h2sfg". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:07:28 crc kubenswrapper[4869]: I1001 15:07:28.498375 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2d195bae-5172-4387-869b-086f215963ff-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "2d195bae-5172-4387-869b-086f215963ff" (UID: "2d195bae-5172-4387-869b-086f215963ff"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:07:28 crc kubenswrapper[4869]: I1001 15:07:28.498733 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87c849d7-d613-446d-9f2f-bdcf6da7e4e6-kube-api-access-gb7pd" (OuterVolumeSpecName: "kube-api-access-gb7pd") pod "87c849d7-d613-446d-9f2f-bdcf6da7e4e6" (UID: "87c849d7-d613-446d-9f2f-bdcf6da7e4e6"). InnerVolumeSpecName "kube-api-access-gb7pd". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:07:28 crc kubenswrapper[4869]: I1001 15:07:28.509241 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1b0f8e72-9451-4c28-8e39-9e8c94096b80-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1b0f8e72-9451-4c28-8e39-9e8c94096b80" (UID: "1b0f8e72-9451-4c28-8e39-9e8c94096b80"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 15:07:28 crc kubenswrapper[4869]: I1001 15:07:28.544217 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/87c849d7-d613-446d-9f2f-bdcf6da7e4e6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "87c849d7-d613-446d-9f2f-bdcf6da7e4e6" (UID: "87c849d7-d613-446d-9f2f-bdcf6da7e4e6"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 15:07:28 crc kubenswrapper[4869]: I1001 15:07:28.575151 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3d37704b-7106-4dcc-b91a-1de81e03d6a9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3d37704b-7106-4dcc-b91a-1de81e03d6a9" (UID: "3d37704b-7106-4dcc-b91a-1de81e03d6a9"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 15:07:28 crc kubenswrapper[4869]: I1001 15:07:28.595020 4869 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/2d195bae-5172-4387-869b-086f215963ff-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Oct 01 15:07:28 crc kubenswrapper[4869]: I1001 15:07:28.595048 4869 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3d37704b-7106-4dcc-b91a-1de81e03d6a9-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 15:07:28 crc kubenswrapper[4869]: I1001 15:07:28.595059 4869 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1b0f8e72-9451-4c28-8e39-9e8c94096b80-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 15:07:28 crc kubenswrapper[4869]: I1001 15:07:28.595069 4869 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1b0f8e72-9451-4c28-8e39-9e8c94096b80-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 15:07:28 crc kubenswrapper[4869]: I1001 15:07:28.595079 4869 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2d195bae-5172-4387-869b-086f215963ff-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 01 15:07:28 crc kubenswrapper[4869]: I1001 15:07:28.595088 4869 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3d37704b-7106-4dcc-b91a-1de81e03d6a9-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 15:07:28 crc kubenswrapper[4869]: I1001 15:07:28.595098 4869 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/87c849d7-d613-446d-9f2f-bdcf6da7e4e6-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 15:07:28 crc kubenswrapper[4869]: I1001 15:07:28.595108 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h2sfg\" (UniqueName: \"kubernetes.io/projected/3d37704b-7106-4dcc-b91a-1de81e03d6a9-kube-api-access-h2sfg\") on node \"crc\" DevicePath \"\"" Oct 01 15:07:28 crc kubenswrapper[4869]: I1001 15:07:28.595116 4869 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/87c849d7-d613-446d-9f2f-bdcf6da7e4e6-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 15:07:28 crc kubenswrapper[4869]: I1001 15:07:28.595127 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gb7pd\" (UniqueName: \"kubernetes.io/projected/87c849d7-d613-446d-9f2f-bdcf6da7e4e6-kube-api-access-gb7pd\") on node \"crc\" DevicePath \"\"" Oct 01 15:07:28 crc kubenswrapper[4869]: I1001 15:07:28.638968 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-fqbbl"] Oct 01 15:07:28 crc kubenswrapper[4869]: W1001 15:07:28.647469 4869 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0674ba6e_99f1_494a_ab15_a852605f2d52.slice/crio-360a467c73cb2f1061c77326c26758aff839d59ee018d302d72179d159d2a54e WatchSource:0}: Error finding container 360a467c73cb2f1061c77326c26758aff839d59ee018d302d72179d159d2a54e: Status 404 returned error can't find the container with id 360a467c73cb2f1061c77326c26758aff839d59ee018d302d72179d159d2a54e Oct 01 15:07:28 crc kubenswrapper[4869]: I1001 15:07:28.938094 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-262k2" event={"ID":"f6f8609b-7f80-4b8f-a371-bb1d10396a15","Type":"ContainerDied","Data":"fc8de042cac333c18ee3d6e072bd6281ec19b61a76f5ac6e56f77b905b7a2251"} Oct 01 15:07:28 crc kubenswrapper[4869]: I1001 15:07:28.938666 4869 scope.go:117] "RemoveContainer" containerID="eeab3bcbb7bf7e2eca7c12d433f5c81e2680737975733fdcd5ec24c251ff68bd" Oct 01 15:07:28 crc kubenswrapper[4869]: I1001 15:07:28.938709 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-262k2" Oct 01 15:07:28 crc kubenswrapper[4869]: I1001 15:07:28.942343 4869 generic.go:334] "Generic (PLEG): container finished" podID="1b0f8e72-9451-4c28-8e39-9e8c94096b80" containerID="07720c3cd6704525796dcb655526f3093a28bf2533a62fd574ad80e771ef0ce5" exitCode=0 Oct 01 15:07:28 crc kubenswrapper[4869]: I1001 15:07:28.942390 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bcf4v" event={"ID":"1b0f8e72-9451-4c28-8e39-9e8c94096b80","Type":"ContainerDied","Data":"07720c3cd6704525796dcb655526f3093a28bf2533a62fd574ad80e771ef0ce5"} Oct 01 15:07:28 crc kubenswrapper[4869]: I1001 15:07:28.942407 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bcf4v" event={"ID":"1b0f8e72-9451-4c28-8e39-9e8c94096b80","Type":"ContainerDied","Data":"6a00783b2c2395eb51be85a38eaf093df51f3a6cf804a95bce08fbdae35d553c"} Oct 01 15:07:28 crc kubenswrapper[4869]: I1001 15:07:28.942470 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-bcf4v" Oct 01 15:07:28 crc kubenswrapper[4869]: I1001 15:07:28.946094 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qf2zt" event={"ID":"87c849d7-d613-446d-9f2f-bdcf6da7e4e6","Type":"ContainerDied","Data":"443568e766c749eb77d704ea5f0cd76ba1cda514ae736ea92dbc8c8a8f57c687"} Oct 01 15:07:28 crc kubenswrapper[4869]: I1001 15:07:28.946218 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-qf2zt" Oct 01 15:07:28 crc kubenswrapper[4869]: I1001 15:07:28.953705 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-fqbbl" event={"ID":"0674ba6e-99f1-494a-ab15-a852605f2d52","Type":"ContainerStarted","Data":"32c6d0ddc8e3ddbc027154abdc9193748490b70b15a9f611c1593c6ff5c2d8c2"} Oct 01 15:07:28 crc kubenswrapper[4869]: I1001 15:07:28.953761 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-fqbbl" event={"ID":"0674ba6e-99f1-494a-ab15-a852605f2d52","Type":"ContainerStarted","Data":"360a467c73cb2f1061c77326c26758aff839d59ee018d302d72179d159d2a54e"} Oct 01 15:07:28 crc kubenswrapper[4869]: I1001 15:07:28.953882 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-fqbbl" Oct 01 15:07:28 crc kubenswrapper[4869]: I1001 15:07:28.955113 4869 generic.go:334] "Generic (PLEG): container finished" podID="2d195bae-5172-4387-869b-086f215963ff" containerID="aa96b28348f96b32cfacf6e83e427c3bff779f8a42f91f2b49d611ddf30525f5" exitCode=0 Oct 01 15:07:28 crc kubenswrapper[4869]: I1001 15:07:28.955165 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-t5kll" event={"ID":"2d195bae-5172-4387-869b-086f215963ff","Type":"ContainerDied","Data":"aa96b28348f96b32cfacf6e83e427c3bff779f8a42f91f2b49d611ddf30525f5"} Oct 01 15:07:28 crc kubenswrapper[4869]: I1001 15:07:28.955188 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-t5kll" event={"ID":"2d195bae-5172-4387-869b-086f215963ff","Type":"ContainerDied","Data":"c941b6d52eb8b5a42fa91ad19317a29a2027c4fa6f57538ec5a3e9334c13b07c"} Oct 01 15:07:28 crc kubenswrapper[4869]: I1001 15:07:28.955289 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-t5kll" Oct 01 15:07:28 crc kubenswrapper[4869]: I1001 15:07:28.956678 4869 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-fqbbl container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.55:8080/healthz\": dial tcp 10.217.0.55:8080: connect: connection refused" start-of-body= Oct 01 15:07:28 crc kubenswrapper[4869]: I1001 15:07:28.956732 4869 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-fqbbl" podUID="0674ba6e-99f1-494a-ab15-a852605f2d52" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.55:8080/healthz\": dial tcp 10.217.0.55:8080: connect: connection refused" Oct 01 15:07:28 crc kubenswrapper[4869]: I1001 15:07:28.963333 4869 generic.go:334] "Generic (PLEG): container finished" podID="3d37704b-7106-4dcc-b91a-1de81e03d6a9" containerID="cfdfde00cc688d2f87dc9212c3c234cbe19f52fb5bc46b6aaf3b58cf03db6930" exitCode=0 Oct 01 15:07:28 crc kubenswrapper[4869]: I1001 15:07:28.963379 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xvckx" event={"ID":"3d37704b-7106-4dcc-b91a-1de81e03d6a9","Type":"ContainerDied","Data":"cfdfde00cc688d2f87dc9212c3c234cbe19f52fb5bc46b6aaf3b58cf03db6930"} Oct 01 15:07:28 crc kubenswrapper[4869]: I1001 15:07:28.963410 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xvckx" event={"ID":"3d37704b-7106-4dcc-b91a-1de81e03d6a9","Type":"ContainerDied","Data":"8d4c57fe0e9ca89d859c76e89b0c9f4644a7c8ee8fe224ad4603a2904e641750"} Oct 01 15:07:28 crc kubenswrapper[4869]: I1001 15:07:28.963482 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-xvckx" Oct 01 15:07:28 crc kubenswrapper[4869]: I1001 15:07:28.987039 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-fqbbl" podStartSLOduration=1.987020535 podStartE2EDuration="1.987020535s" podCreationTimestamp="2025-10-01 15:07:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:07:28.984914969 +0000 UTC m=+158.131758285" watchObservedRunningTime="2025-10-01 15:07:28.987020535 +0000 UTC m=+158.133863651" Oct 01 15:07:29 crc kubenswrapper[4869]: I1001 15:07:29.004172 4869 scope.go:117] "RemoveContainer" containerID="70aa4e4d52e780ca00904740e4605f094164fc76a86417afff954c1f8ece8cd6" Oct 01 15:07:29 crc kubenswrapper[4869]: I1001 15:07:29.019613 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-qf2zt"] Oct 01 15:07:29 crc kubenswrapper[4869]: I1001 15:07:29.023609 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-qf2zt"] Oct 01 15:07:29 crc kubenswrapper[4869]: I1001 15:07:29.031519 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-262k2"] Oct 01 15:07:29 crc kubenswrapper[4869]: I1001 15:07:29.035364 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-262k2"] Oct 01 15:07:29 crc kubenswrapper[4869]: I1001 15:07:29.039732 4869 scope.go:117] "RemoveContainer" containerID="13586bd6881cf500aeb36a4b8acab801edf57b584a7848ff9784afcba0abad7f" Oct 01 15:07:29 crc kubenswrapper[4869]: I1001 15:07:29.046166 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-xvckx"] Oct 01 15:07:29 crc kubenswrapper[4869]: I1001 15:07:29.048549 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-xvckx"] Oct 01 15:07:29 crc kubenswrapper[4869]: I1001 15:07:29.058783 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-t5kll"] Oct 01 15:07:29 crc kubenswrapper[4869]: I1001 15:07:29.060528 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-t5kll"] Oct 01 15:07:29 crc kubenswrapper[4869]: I1001 15:07:29.061774 4869 scope.go:117] "RemoveContainer" containerID="07720c3cd6704525796dcb655526f3093a28bf2533a62fd574ad80e771ef0ce5" Oct 01 15:07:29 crc kubenswrapper[4869]: I1001 15:07:29.071186 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-bcf4v"] Oct 01 15:07:29 crc kubenswrapper[4869]: I1001 15:07:29.073445 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-bcf4v"] Oct 01 15:07:29 crc kubenswrapper[4869]: I1001 15:07:29.086125 4869 scope.go:117] "RemoveContainer" containerID="0334bf3a4f0053cd94ddaeedb5dc71b8dcd4a3eafafdd7eb97ebe43d07d018a8" Oct 01 15:07:29 crc kubenswrapper[4869]: I1001 15:07:29.101587 4869 scope.go:117] "RemoveContainer" containerID="27b6aac3f6cbc1b4d61941e52a5acbe636a346e2e1b6089319caf8c4c0f7963b" Oct 01 15:07:29 crc kubenswrapper[4869]: I1001 15:07:29.121863 4869 scope.go:117] "RemoveContainer" containerID="07720c3cd6704525796dcb655526f3093a28bf2533a62fd574ad80e771ef0ce5" Oct 01 15:07:29 crc kubenswrapper[4869]: E1001 15:07:29.122205 4869 
log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"07720c3cd6704525796dcb655526f3093a28bf2533a62fd574ad80e771ef0ce5\": container with ID starting with 07720c3cd6704525796dcb655526f3093a28bf2533a62fd574ad80e771ef0ce5 not found: ID does not exist" containerID="07720c3cd6704525796dcb655526f3093a28bf2533a62fd574ad80e771ef0ce5" Oct 01 15:07:29 crc kubenswrapper[4869]: I1001 15:07:29.122243 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"07720c3cd6704525796dcb655526f3093a28bf2533a62fd574ad80e771ef0ce5"} err="failed to get container status \"07720c3cd6704525796dcb655526f3093a28bf2533a62fd574ad80e771ef0ce5\": rpc error: code = NotFound desc = could not find container \"07720c3cd6704525796dcb655526f3093a28bf2533a62fd574ad80e771ef0ce5\": container with ID starting with 07720c3cd6704525796dcb655526f3093a28bf2533a62fd574ad80e771ef0ce5 not found: ID does not exist" Oct 01 15:07:29 crc kubenswrapper[4869]: I1001 15:07:29.122286 4869 scope.go:117] "RemoveContainer" containerID="0334bf3a4f0053cd94ddaeedb5dc71b8dcd4a3eafafdd7eb97ebe43d07d018a8" Oct 01 15:07:29 crc kubenswrapper[4869]: E1001 15:07:29.122500 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0334bf3a4f0053cd94ddaeedb5dc71b8dcd4a3eafafdd7eb97ebe43d07d018a8\": container with ID starting with 0334bf3a4f0053cd94ddaeedb5dc71b8dcd4a3eafafdd7eb97ebe43d07d018a8 not found: ID does not exist" containerID="0334bf3a4f0053cd94ddaeedb5dc71b8dcd4a3eafafdd7eb97ebe43d07d018a8" Oct 01 15:07:29 crc kubenswrapper[4869]: I1001 15:07:29.122528 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0334bf3a4f0053cd94ddaeedb5dc71b8dcd4a3eafafdd7eb97ebe43d07d018a8"} err="failed to get container status \"0334bf3a4f0053cd94ddaeedb5dc71b8dcd4a3eafafdd7eb97ebe43d07d018a8\": rpc error: code = NotFound desc = could not find container \"0334bf3a4f0053cd94ddaeedb5dc71b8dcd4a3eafafdd7eb97ebe43d07d018a8\": container with ID starting with 0334bf3a4f0053cd94ddaeedb5dc71b8dcd4a3eafafdd7eb97ebe43d07d018a8 not found: ID does not exist" Oct 01 15:07:29 crc kubenswrapper[4869]: I1001 15:07:29.122545 4869 scope.go:117] "RemoveContainer" containerID="27b6aac3f6cbc1b4d61941e52a5acbe636a346e2e1b6089319caf8c4c0f7963b" Oct 01 15:07:29 crc kubenswrapper[4869]: E1001 15:07:29.122764 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"27b6aac3f6cbc1b4d61941e52a5acbe636a346e2e1b6089319caf8c4c0f7963b\": container with ID starting with 27b6aac3f6cbc1b4d61941e52a5acbe636a346e2e1b6089319caf8c4c0f7963b not found: ID does not exist" containerID="27b6aac3f6cbc1b4d61941e52a5acbe636a346e2e1b6089319caf8c4c0f7963b" Oct 01 15:07:29 crc kubenswrapper[4869]: I1001 15:07:29.122794 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"27b6aac3f6cbc1b4d61941e52a5acbe636a346e2e1b6089319caf8c4c0f7963b"} err="failed to get container status \"27b6aac3f6cbc1b4d61941e52a5acbe636a346e2e1b6089319caf8c4c0f7963b\": rpc error: code = NotFound desc = could not find container \"27b6aac3f6cbc1b4d61941e52a5acbe636a346e2e1b6089319caf8c4c0f7963b\": container with ID starting with 27b6aac3f6cbc1b4d61941e52a5acbe636a346e2e1b6089319caf8c4c0f7963b not found: ID does not exist" Oct 01 15:07:29 crc kubenswrapper[4869]: I1001 15:07:29.122814 4869 scope.go:117] 
"RemoveContainer" containerID="f6a51d050f74f3ce6a16bba6da88dba863231f0f8ac6c9bbea228ebf6335f69e" Oct 01 15:07:29 crc kubenswrapper[4869]: I1001 15:07:29.136505 4869 scope.go:117] "RemoveContainer" containerID="33f28d08c64f57316452d07d2498ce606408877e0e5441f222fdbd068848b0f3" Oct 01 15:07:29 crc kubenswrapper[4869]: I1001 15:07:29.151828 4869 scope.go:117] "RemoveContainer" containerID="2464ac7ae10423f08930a1b33f60af97c97e44ce0a6b6c6be1cc9b7ff7c6dbc4" Oct 01 15:07:29 crc kubenswrapper[4869]: I1001 15:07:29.166789 4869 scope.go:117] "RemoveContainer" containerID="aa96b28348f96b32cfacf6e83e427c3bff779f8a42f91f2b49d611ddf30525f5" Oct 01 15:07:29 crc kubenswrapper[4869]: I1001 15:07:29.179618 4869 scope.go:117] "RemoveContainer" containerID="aa96b28348f96b32cfacf6e83e427c3bff779f8a42f91f2b49d611ddf30525f5" Oct 01 15:07:29 crc kubenswrapper[4869]: E1001 15:07:29.180047 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aa96b28348f96b32cfacf6e83e427c3bff779f8a42f91f2b49d611ddf30525f5\": container with ID starting with aa96b28348f96b32cfacf6e83e427c3bff779f8a42f91f2b49d611ddf30525f5 not found: ID does not exist" containerID="aa96b28348f96b32cfacf6e83e427c3bff779f8a42f91f2b49d611ddf30525f5" Oct 01 15:07:29 crc kubenswrapper[4869]: I1001 15:07:29.180093 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aa96b28348f96b32cfacf6e83e427c3bff779f8a42f91f2b49d611ddf30525f5"} err="failed to get container status \"aa96b28348f96b32cfacf6e83e427c3bff779f8a42f91f2b49d611ddf30525f5\": rpc error: code = NotFound desc = could not find container \"aa96b28348f96b32cfacf6e83e427c3bff779f8a42f91f2b49d611ddf30525f5\": container with ID starting with aa96b28348f96b32cfacf6e83e427c3bff779f8a42f91f2b49d611ddf30525f5 not found: ID does not exist" Oct 01 15:07:29 crc kubenswrapper[4869]: I1001 15:07:29.180123 4869 scope.go:117] "RemoveContainer" containerID="cfdfde00cc688d2f87dc9212c3c234cbe19f52fb5bc46b6aaf3b58cf03db6930" Oct 01 15:07:29 crc kubenswrapper[4869]: I1001 15:07:29.196904 4869 scope.go:117] "RemoveContainer" containerID="a6ffeaa91d7cfa19714dbc69460541e49602f320ca1a1049a4f9936e9705d9cc" Oct 01 15:07:29 crc kubenswrapper[4869]: I1001 15:07:29.225037 4869 scope.go:117] "RemoveContainer" containerID="eff29f31cf7005108ec7269a92445ed7b765afa376cce5debe729f7aac109324" Oct 01 15:07:29 crc kubenswrapper[4869]: I1001 15:07:29.240414 4869 scope.go:117] "RemoveContainer" containerID="cfdfde00cc688d2f87dc9212c3c234cbe19f52fb5bc46b6aaf3b58cf03db6930" Oct 01 15:07:29 crc kubenswrapper[4869]: E1001 15:07:29.240783 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cfdfde00cc688d2f87dc9212c3c234cbe19f52fb5bc46b6aaf3b58cf03db6930\": container with ID starting with cfdfde00cc688d2f87dc9212c3c234cbe19f52fb5bc46b6aaf3b58cf03db6930 not found: ID does not exist" containerID="cfdfde00cc688d2f87dc9212c3c234cbe19f52fb5bc46b6aaf3b58cf03db6930" Oct 01 15:07:29 crc kubenswrapper[4869]: I1001 15:07:29.240826 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cfdfde00cc688d2f87dc9212c3c234cbe19f52fb5bc46b6aaf3b58cf03db6930"} err="failed to get container status \"cfdfde00cc688d2f87dc9212c3c234cbe19f52fb5bc46b6aaf3b58cf03db6930\": rpc error: code = NotFound desc = could not find container \"cfdfde00cc688d2f87dc9212c3c234cbe19f52fb5bc46b6aaf3b58cf03db6930\": container with ID starting with 
cfdfde00cc688d2f87dc9212c3c234cbe19f52fb5bc46b6aaf3b58cf03db6930 not found: ID does not exist" Oct 01 15:07:29 crc kubenswrapper[4869]: I1001 15:07:29.240854 4869 scope.go:117] "RemoveContainer" containerID="a6ffeaa91d7cfa19714dbc69460541e49602f320ca1a1049a4f9936e9705d9cc" Oct 01 15:07:29 crc kubenswrapper[4869]: E1001 15:07:29.241134 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a6ffeaa91d7cfa19714dbc69460541e49602f320ca1a1049a4f9936e9705d9cc\": container with ID starting with a6ffeaa91d7cfa19714dbc69460541e49602f320ca1a1049a4f9936e9705d9cc not found: ID does not exist" containerID="a6ffeaa91d7cfa19714dbc69460541e49602f320ca1a1049a4f9936e9705d9cc" Oct 01 15:07:29 crc kubenswrapper[4869]: I1001 15:07:29.241166 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a6ffeaa91d7cfa19714dbc69460541e49602f320ca1a1049a4f9936e9705d9cc"} err="failed to get container status \"a6ffeaa91d7cfa19714dbc69460541e49602f320ca1a1049a4f9936e9705d9cc\": rpc error: code = NotFound desc = could not find container \"a6ffeaa91d7cfa19714dbc69460541e49602f320ca1a1049a4f9936e9705d9cc\": container with ID starting with a6ffeaa91d7cfa19714dbc69460541e49602f320ca1a1049a4f9936e9705d9cc not found: ID does not exist" Oct 01 15:07:29 crc kubenswrapper[4869]: I1001 15:07:29.241188 4869 scope.go:117] "RemoveContainer" containerID="eff29f31cf7005108ec7269a92445ed7b765afa376cce5debe729f7aac109324" Oct 01 15:07:29 crc kubenswrapper[4869]: E1001 15:07:29.241823 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eff29f31cf7005108ec7269a92445ed7b765afa376cce5debe729f7aac109324\": container with ID starting with eff29f31cf7005108ec7269a92445ed7b765afa376cce5debe729f7aac109324 not found: ID does not exist" containerID="eff29f31cf7005108ec7269a92445ed7b765afa376cce5debe729f7aac109324" Oct 01 15:07:29 crc kubenswrapper[4869]: I1001 15:07:29.241845 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eff29f31cf7005108ec7269a92445ed7b765afa376cce5debe729f7aac109324"} err="failed to get container status \"eff29f31cf7005108ec7269a92445ed7b765afa376cce5debe729f7aac109324\": rpc error: code = NotFound desc = could not find container \"eff29f31cf7005108ec7269a92445ed7b765afa376cce5debe729f7aac109324\": container with ID starting with eff29f31cf7005108ec7269a92445ed7b765afa376cce5debe729f7aac109324 not found: ID does not exist" Oct 01 15:07:29 crc kubenswrapper[4869]: I1001 15:07:29.591134 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1b0f8e72-9451-4c28-8e39-9e8c94096b80" path="/var/lib/kubelet/pods/1b0f8e72-9451-4c28-8e39-9e8c94096b80/volumes" Oct 01 15:07:29 crc kubenswrapper[4869]: I1001 15:07:29.593598 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2d195bae-5172-4387-869b-086f215963ff" path="/var/lib/kubelet/pods/2d195bae-5172-4387-869b-086f215963ff/volumes" Oct 01 15:07:29 crc kubenswrapper[4869]: I1001 15:07:29.594233 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3d37704b-7106-4dcc-b91a-1de81e03d6a9" path="/var/lib/kubelet/pods/3d37704b-7106-4dcc-b91a-1de81e03d6a9/volumes" Oct 01 15:07:29 crc kubenswrapper[4869]: I1001 15:07:29.595691 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87c849d7-d613-446d-9f2f-bdcf6da7e4e6" 
path="/var/lib/kubelet/pods/87c849d7-d613-446d-9f2f-bdcf6da7e4e6/volumes" Oct 01 15:07:29 crc kubenswrapper[4869]: I1001 15:07:29.596952 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f6f8609b-7f80-4b8f-a371-bb1d10396a15" path="/var/lib/kubelet/pods/f6f8609b-7f80-4b8f-a371-bb1d10396a15/volumes" Oct 01 15:07:29 crc kubenswrapper[4869]: I1001 15:07:29.979235 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-fqbbl" Oct 01 15:07:30 crc kubenswrapper[4869]: I1001 15:07:30.019325 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-r6jsz"] Oct 01 15:07:30 crc kubenswrapper[4869]: E1001 15:07:30.020952 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d37704b-7106-4dcc-b91a-1de81e03d6a9" containerName="extract-utilities" Oct 01 15:07:30 crc kubenswrapper[4869]: I1001 15:07:30.021114 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d37704b-7106-4dcc-b91a-1de81e03d6a9" containerName="extract-utilities" Oct 01 15:07:30 crc kubenswrapper[4869]: E1001 15:07:30.021248 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1b0f8e72-9451-4c28-8e39-9e8c94096b80" containerName="extract-content" Oct 01 15:07:30 crc kubenswrapper[4869]: I1001 15:07:30.021405 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="1b0f8e72-9451-4c28-8e39-9e8c94096b80" containerName="extract-content" Oct 01 15:07:30 crc kubenswrapper[4869]: E1001 15:07:30.021533 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d37704b-7106-4dcc-b91a-1de81e03d6a9" containerName="extract-content" Oct 01 15:07:30 crc kubenswrapper[4869]: I1001 15:07:30.021650 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d37704b-7106-4dcc-b91a-1de81e03d6a9" containerName="extract-content" Oct 01 15:07:30 crc kubenswrapper[4869]: E1001 15:07:30.021772 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1b0f8e72-9451-4c28-8e39-9e8c94096b80" containerName="registry-server" Oct 01 15:07:30 crc kubenswrapper[4869]: I1001 15:07:30.021890 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="1b0f8e72-9451-4c28-8e39-9e8c94096b80" containerName="registry-server" Oct 01 15:07:30 crc kubenswrapper[4869]: E1001 15:07:30.022058 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d195bae-5172-4387-869b-086f215963ff" containerName="marketplace-operator" Oct 01 15:07:30 crc kubenswrapper[4869]: I1001 15:07:30.022166 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d195bae-5172-4387-869b-086f215963ff" containerName="marketplace-operator" Oct 01 15:07:30 crc kubenswrapper[4869]: E1001 15:07:30.022318 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="87c849d7-d613-446d-9f2f-bdcf6da7e4e6" containerName="extract-content" Oct 01 15:07:30 crc kubenswrapper[4869]: I1001 15:07:30.022430 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="87c849d7-d613-446d-9f2f-bdcf6da7e4e6" containerName="extract-content" Oct 01 15:07:30 crc kubenswrapper[4869]: E1001 15:07:30.022579 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f6f8609b-7f80-4b8f-a371-bb1d10396a15" containerName="extract-utilities" Oct 01 15:07:30 crc kubenswrapper[4869]: I1001 15:07:30.022703 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="f6f8609b-7f80-4b8f-a371-bb1d10396a15" containerName="extract-utilities" Oct 01 15:07:30 crc kubenswrapper[4869]: E1001 
15:07:30.022816 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f6f8609b-7f80-4b8f-a371-bb1d10396a15" containerName="extract-content" Oct 01 15:07:30 crc kubenswrapper[4869]: I1001 15:07:30.022935 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="f6f8609b-7f80-4b8f-a371-bb1d10396a15" containerName="extract-content" Oct 01 15:07:30 crc kubenswrapper[4869]: E1001 15:07:30.023060 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f6f8609b-7f80-4b8f-a371-bb1d10396a15" containerName="registry-server" Oct 01 15:07:30 crc kubenswrapper[4869]: I1001 15:07:30.023177 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="f6f8609b-7f80-4b8f-a371-bb1d10396a15" containerName="registry-server" Oct 01 15:07:30 crc kubenswrapper[4869]: E1001 15:07:30.023334 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="87c849d7-d613-446d-9f2f-bdcf6da7e4e6" containerName="extract-utilities" Oct 01 15:07:30 crc kubenswrapper[4869]: I1001 15:07:30.023477 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="87c849d7-d613-446d-9f2f-bdcf6da7e4e6" containerName="extract-utilities" Oct 01 15:07:30 crc kubenswrapper[4869]: E1001 15:07:30.023601 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="87c849d7-d613-446d-9f2f-bdcf6da7e4e6" containerName="registry-server" Oct 01 15:07:30 crc kubenswrapper[4869]: I1001 15:07:30.023711 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="87c849d7-d613-446d-9f2f-bdcf6da7e4e6" containerName="registry-server" Oct 01 15:07:30 crc kubenswrapper[4869]: E1001 15:07:30.023842 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1b0f8e72-9451-4c28-8e39-9e8c94096b80" containerName="extract-utilities" Oct 01 15:07:30 crc kubenswrapper[4869]: I1001 15:07:30.023949 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="1b0f8e72-9451-4c28-8e39-9e8c94096b80" containerName="extract-utilities" Oct 01 15:07:30 crc kubenswrapper[4869]: E1001 15:07:30.024074 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d37704b-7106-4dcc-b91a-1de81e03d6a9" containerName="registry-server" Oct 01 15:07:30 crc kubenswrapper[4869]: I1001 15:07:30.024184 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d37704b-7106-4dcc-b91a-1de81e03d6a9" containerName="registry-server" Oct 01 15:07:30 crc kubenswrapper[4869]: I1001 15:07:30.024647 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="1b0f8e72-9451-4c28-8e39-9e8c94096b80" containerName="registry-server" Oct 01 15:07:30 crc kubenswrapper[4869]: I1001 15:07:30.024801 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="3d37704b-7106-4dcc-b91a-1de81e03d6a9" containerName="registry-server" Oct 01 15:07:30 crc kubenswrapper[4869]: I1001 15:07:30.024933 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="f6f8609b-7f80-4b8f-a371-bb1d10396a15" containerName="registry-server" Oct 01 15:07:30 crc kubenswrapper[4869]: I1001 15:07:30.025054 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="87c849d7-d613-446d-9f2f-bdcf6da7e4e6" containerName="registry-server" Oct 01 15:07:30 crc kubenswrapper[4869]: I1001 15:07:30.025402 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="2d195bae-5172-4387-869b-086f215963ff" containerName="marketplace-operator" Oct 01 15:07:30 crc kubenswrapper[4869]: I1001 15:07:30.029917 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-r6jsz"] Oct 01 15:07:30 
crc kubenswrapper[4869]: I1001 15:07:30.029983 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-r6jsz" Oct 01 15:07:30 crc kubenswrapper[4869]: I1001 15:07:30.032458 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Oct 01 15:07:30 crc kubenswrapper[4869]: I1001 15:07:30.204823 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-clqzs"] Oct 01 15:07:30 crc kubenswrapper[4869]: I1001 15:07:30.206061 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-clqzs" Oct 01 15:07:30 crc kubenswrapper[4869]: I1001 15:07:30.207878 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Oct 01 15:07:30 crc kubenswrapper[4869]: I1001 15:07:30.216463 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fddf94b6-3137-4043-bbfd-28ae6650fb5a-utilities\") pod \"certified-operators-r6jsz\" (UID: \"fddf94b6-3137-4043-bbfd-28ae6650fb5a\") " pod="openshift-marketplace/certified-operators-r6jsz" Oct 01 15:07:30 crc kubenswrapper[4869]: I1001 15:07:30.216533 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fddf94b6-3137-4043-bbfd-28ae6650fb5a-catalog-content\") pod \"certified-operators-r6jsz\" (UID: \"fddf94b6-3137-4043-bbfd-28ae6650fb5a\") " pod="openshift-marketplace/certified-operators-r6jsz" Oct 01 15:07:30 crc kubenswrapper[4869]: I1001 15:07:30.216586 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fwmwm\" (UniqueName: \"kubernetes.io/projected/fddf94b6-3137-4043-bbfd-28ae6650fb5a-kube-api-access-fwmwm\") pod \"certified-operators-r6jsz\" (UID: \"fddf94b6-3137-4043-bbfd-28ae6650fb5a\") " pod="openshift-marketplace/certified-operators-r6jsz" Oct 01 15:07:30 crc kubenswrapper[4869]: I1001 15:07:30.219032 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-clqzs"] Oct 01 15:07:30 crc kubenswrapper[4869]: I1001 15:07:30.317396 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fwmwm\" (UniqueName: \"kubernetes.io/projected/fddf94b6-3137-4043-bbfd-28ae6650fb5a-kube-api-access-fwmwm\") pod \"certified-operators-r6jsz\" (UID: \"fddf94b6-3137-4043-bbfd-28ae6650fb5a\") " pod="openshift-marketplace/certified-operators-r6jsz" Oct 01 15:07:30 crc kubenswrapper[4869]: I1001 15:07:30.317471 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9bae054d-44c8-41e1-9383-128da7767e72-utilities\") pod \"community-operators-clqzs\" (UID: \"9bae054d-44c8-41e1-9383-128da7767e72\") " pod="openshift-marketplace/community-operators-clqzs" Oct 01 15:07:30 crc kubenswrapper[4869]: I1001 15:07:30.317516 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9bae054d-44c8-41e1-9383-128da7767e72-catalog-content\") pod \"community-operators-clqzs\" (UID: \"9bae054d-44c8-41e1-9383-128da7767e72\") " 
pod="openshift-marketplace/community-operators-clqzs" Oct 01 15:07:30 crc kubenswrapper[4869]: I1001 15:07:30.317588 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fddf94b6-3137-4043-bbfd-28ae6650fb5a-utilities\") pod \"certified-operators-r6jsz\" (UID: \"fddf94b6-3137-4043-bbfd-28ae6650fb5a\") " pod="openshift-marketplace/certified-operators-r6jsz" Oct 01 15:07:30 crc kubenswrapper[4869]: I1001 15:07:30.317664 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fddf94b6-3137-4043-bbfd-28ae6650fb5a-catalog-content\") pod \"certified-operators-r6jsz\" (UID: \"fddf94b6-3137-4043-bbfd-28ae6650fb5a\") " pod="openshift-marketplace/certified-operators-r6jsz" Oct 01 15:07:30 crc kubenswrapper[4869]: I1001 15:07:30.317707 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l8ctq\" (UniqueName: \"kubernetes.io/projected/9bae054d-44c8-41e1-9383-128da7767e72-kube-api-access-l8ctq\") pod \"community-operators-clqzs\" (UID: \"9bae054d-44c8-41e1-9383-128da7767e72\") " pod="openshift-marketplace/community-operators-clqzs" Oct 01 15:07:30 crc kubenswrapper[4869]: I1001 15:07:30.318767 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fddf94b6-3137-4043-bbfd-28ae6650fb5a-utilities\") pod \"certified-operators-r6jsz\" (UID: \"fddf94b6-3137-4043-bbfd-28ae6650fb5a\") " pod="openshift-marketplace/certified-operators-r6jsz" Oct 01 15:07:30 crc kubenswrapper[4869]: I1001 15:07:30.319424 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fddf94b6-3137-4043-bbfd-28ae6650fb5a-catalog-content\") pod \"certified-operators-r6jsz\" (UID: \"fddf94b6-3137-4043-bbfd-28ae6650fb5a\") " pod="openshift-marketplace/certified-operators-r6jsz" Oct 01 15:07:30 crc kubenswrapper[4869]: I1001 15:07:30.339011 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fwmwm\" (UniqueName: \"kubernetes.io/projected/fddf94b6-3137-4043-bbfd-28ae6650fb5a-kube-api-access-fwmwm\") pod \"certified-operators-r6jsz\" (UID: \"fddf94b6-3137-4043-bbfd-28ae6650fb5a\") " pod="openshift-marketplace/certified-operators-r6jsz" Oct 01 15:07:30 crc kubenswrapper[4869]: I1001 15:07:30.347022 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-r6jsz" Oct 01 15:07:30 crc kubenswrapper[4869]: I1001 15:07:30.419056 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l8ctq\" (UniqueName: \"kubernetes.io/projected/9bae054d-44c8-41e1-9383-128da7767e72-kube-api-access-l8ctq\") pod \"community-operators-clqzs\" (UID: \"9bae054d-44c8-41e1-9383-128da7767e72\") " pod="openshift-marketplace/community-operators-clqzs" Oct 01 15:07:30 crc kubenswrapper[4869]: I1001 15:07:30.419124 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9bae054d-44c8-41e1-9383-128da7767e72-utilities\") pod \"community-operators-clqzs\" (UID: \"9bae054d-44c8-41e1-9383-128da7767e72\") " pod="openshift-marketplace/community-operators-clqzs" Oct 01 15:07:30 crc kubenswrapper[4869]: I1001 15:07:30.419182 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9bae054d-44c8-41e1-9383-128da7767e72-catalog-content\") pod \"community-operators-clqzs\" (UID: \"9bae054d-44c8-41e1-9383-128da7767e72\") " pod="openshift-marketplace/community-operators-clqzs" Oct 01 15:07:30 crc kubenswrapper[4869]: I1001 15:07:30.419654 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9bae054d-44c8-41e1-9383-128da7767e72-catalog-content\") pod \"community-operators-clqzs\" (UID: \"9bae054d-44c8-41e1-9383-128da7767e72\") " pod="openshift-marketplace/community-operators-clqzs" Oct 01 15:07:30 crc kubenswrapper[4869]: I1001 15:07:30.419834 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9bae054d-44c8-41e1-9383-128da7767e72-utilities\") pod \"community-operators-clqzs\" (UID: \"9bae054d-44c8-41e1-9383-128da7767e72\") " pod="openshift-marketplace/community-operators-clqzs" Oct 01 15:07:30 crc kubenswrapper[4869]: I1001 15:07:30.435937 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l8ctq\" (UniqueName: \"kubernetes.io/projected/9bae054d-44c8-41e1-9383-128da7767e72-kube-api-access-l8ctq\") pod \"community-operators-clqzs\" (UID: \"9bae054d-44c8-41e1-9383-128da7767e72\") " pod="openshift-marketplace/community-operators-clqzs" Oct 01 15:07:30 crc kubenswrapper[4869]: I1001 15:07:30.536548 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-clqzs" Oct 01 15:07:30 crc kubenswrapper[4869]: I1001 15:07:30.539615 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-r6jsz"] Oct 01 15:07:30 crc kubenswrapper[4869]: I1001 15:07:30.735758 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-clqzs"] Oct 01 15:07:30 crc kubenswrapper[4869]: W1001 15:07:30.748298 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9bae054d_44c8_41e1_9383_128da7767e72.slice/crio-0e9dfa4a7cb7c6635330d4403e1661a452a8ea7acc8a49324026b504e7dac374 WatchSource:0}: Error finding container 0e9dfa4a7cb7c6635330d4403e1661a452a8ea7acc8a49324026b504e7dac374: Status 404 returned error can't find the container with id 0e9dfa4a7cb7c6635330d4403e1661a452a8ea7acc8a49324026b504e7dac374 Oct 01 15:07:30 crc kubenswrapper[4869]: I1001 15:07:30.984695 4869 generic.go:334] "Generic (PLEG): container finished" podID="fddf94b6-3137-4043-bbfd-28ae6650fb5a" containerID="787980efe2dcf822fc4b37e382881a56c6d9c274f4b89c637b6d5db49ebce5c4" exitCode=0 Oct 01 15:07:30 crc kubenswrapper[4869]: I1001 15:07:30.984811 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-r6jsz" event={"ID":"fddf94b6-3137-4043-bbfd-28ae6650fb5a","Type":"ContainerDied","Data":"787980efe2dcf822fc4b37e382881a56c6d9c274f4b89c637b6d5db49ebce5c4"} Oct 01 15:07:30 crc kubenswrapper[4869]: I1001 15:07:30.985094 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-r6jsz" event={"ID":"fddf94b6-3137-4043-bbfd-28ae6650fb5a","Type":"ContainerStarted","Data":"9f4e2ebbf9e5dc59c589a9bd6af34811abca5d28a3ea49c020bf67a52d5ee2b9"} Oct 01 15:07:30 crc kubenswrapper[4869]: I1001 15:07:30.987688 4869 generic.go:334] "Generic (PLEG): container finished" podID="9bae054d-44c8-41e1-9383-128da7767e72" containerID="3f63e9fabbfdd18845b71b5b0464cd69f4c982643751a005150b193da13c96b0" exitCode=0 Oct 01 15:07:30 crc kubenswrapper[4869]: I1001 15:07:30.989288 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-clqzs" event={"ID":"9bae054d-44c8-41e1-9383-128da7767e72","Type":"ContainerDied","Data":"3f63e9fabbfdd18845b71b5b0464cd69f4c982643751a005150b193da13c96b0"} Oct 01 15:07:30 crc kubenswrapper[4869]: I1001 15:07:30.989309 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-clqzs" event={"ID":"9bae054d-44c8-41e1-9383-128da7767e72","Type":"ContainerStarted","Data":"0e9dfa4a7cb7c6635330d4403e1661a452a8ea7acc8a49324026b504e7dac374"} Oct 01 15:07:32 crc kubenswrapper[4869]: I1001 15:07:31.999905 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-clqzs" event={"ID":"9bae054d-44c8-41e1-9383-128da7767e72","Type":"ContainerStarted","Data":"7e08be4c58f81592e8bca0ae481381faf5e736f9c25f2d3ef3e2a4df579f8c72"} Oct 01 15:07:32 crc kubenswrapper[4869]: I1001 15:07:32.402875 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-zqdck"] Oct 01 15:07:32 crc kubenswrapper[4869]: I1001 15:07:32.403916 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zqdck" Oct 01 15:07:32 crc kubenswrapper[4869]: I1001 15:07:32.405952 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Oct 01 15:07:32 crc kubenswrapper[4869]: I1001 15:07:32.414376 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-zqdck"] Oct 01 15:07:32 crc kubenswrapper[4869]: I1001 15:07:32.544782 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1ec94678-9587-4f84-b2ce-745d0321216b-utilities\") pod \"redhat-marketplace-zqdck\" (UID: \"1ec94678-9587-4f84-b2ce-745d0321216b\") " pod="openshift-marketplace/redhat-marketplace-zqdck" Oct 01 15:07:32 crc kubenswrapper[4869]: I1001 15:07:32.545123 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1ec94678-9587-4f84-b2ce-745d0321216b-catalog-content\") pod \"redhat-marketplace-zqdck\" (UID: \"1ec94678-9587-4f84-b2ce-745d0321216b\") " pod="openshift-marketplace/redhat-marketplace-zqdck" Oct 01 15:07:32 crc kubenswrapper[4869]: I1001 15:07:32.545179 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m9q65\" (UniqueName: \"kubernetes.io/projected/1ec94678-9587-4f84-b2ce-745d0321216b-kube-api-access-m9q65\") pod \"redhat-marketplace-zqdck\" (UID: \"1ec94678-9587-4f84-b2ce-745d0321216b\") " pod="openshift-marketplace/redhat-marketplace-zqdck" Oct 01 15:07:32 crc kubenswrapper[4869]: I1001 15:07:32.604809 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-gxmfx"] Oct 01 15:07:32 crc kubenswrapper[4869]: I1001 15:07:32.605899 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-gxmfx" Oct 01 15:07:32 crc kubenswrapper[4869]: I1001 15:07:32.610867 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Oct 01 15:07:32 crc kubenswrapper[4869]: I1001 15:07:32.629815 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-gxmfx"] Oct 01 15:07:32 crc kubenswrapper[4869]: I1001 15:07:32.646707 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1ec94678-9587-4f84-b2ce-745d0321216b-catalog-content\") pod \"redhat-marketplace-zqdck\" (UID: \"1ec94678-9587-4f84-b2ce-745d0321216b\") " pod="openshift-marketplace/redhat-marketplace-zqdck" Oct 01 15:07:32 crc kubenswrapper[4869]: I1001 15:07:32.646777 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m9q65\" (UniqueName: \"kubernetes.io/projected/1ec94678-9587-4f84-b2ce-745d0321216b-kube-api-access-m9q65\") pod \"redhat-marketplace-zqdck\" (UID: \"1ec94678-9587-4f84-b2ce-745d0321216b\") " pod="openshift-marketplace/redhat-marketplace-zqdck" Oct 01 15:07:32 crc kubenswrapper[4869]: I1001 15:07:32.646808 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1ec94678-9587-4f84-b2ce-745d0321216b-utilities\") pod \"redhat-marketplace-zqdck\" (UID: \"1ec94678-9587-4f84-b2ce-745d0321216b\") " pod="openshift-marketplace/redhat-marketplace-zqdck" Oct 01 15:07:32 crc kubenswrapper[4869]: I1001 15:07:32.647244 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1ec94678-9587-4f84-b2ce-745d0321216b-utilities\") pod \"redhat-marketplace-zqdck\" (UID: \"1ec94678-9587-4f84-b2ce-745d0321216b\") " pod="openshift-marketplace/redhat-marketplace-zqdck" Oct 01 15:07:32 crc kubenswrapper[4869]: I1001 15:07:32.647274 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1ec94678-9587-4f84-b2ce-745d0321216b-catalog-content\") pod \"redhat-marketplace-zqdck\" (UID: \"1ec94678-9587-4f84-b2ce-745d0321216b\") " pod="openshift-marketplace/redhat-marketplace-zqdck" Oct 01 15:07:32 crc kubenswrapper[4869]: I1001 15:07:32.666144 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m9q65\" (UniqueName: \"kubernetes.io/projected/1ec94678-9587-4f84-b2ce-745d0321216b-kube-api-access-m9q65\") pod \"redhat-marketplace-zqdck\" (UID: \"1ec94678-9587-4f84-b2ce-745d0321216b\") " pod="openshift-marketplace/redhat-marketplace-zqdck" Oct 01 15:07:32 crc kubenswrapper[4869]: I1001 15:07:32.733529 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zqdck" Oct 01 15:07:32 crc kubenswrapper[4869]: I1001 15:07:32.747879 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n7pbm\" (UniqueName: \"kubernetes.io/projected/0861897c-f37b-416a-bc83-7e72df955845-kube-api-access-n7pbm\") pod \"redhat-operators-gxmfx\" (UID: \"0861897c-f37b-416a-bc83-7e72df955845\") " pod="openshift-marketplace/redhat-operators-gxmfx" Oct 01 15:07:32 crc kubenswrapper[4869]: I1001 15:07:32.748142 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0861897c-f37b-416a-bc83-7e72df955845-catalog-content\") pod \"redhat-operators-gxmfx\" (UID: \"0861897c-f37b-416a-bc83-7e72df955845\") " pod="openshift-marketplace/redhat-operators-gxmfx" Oct 01 15:07:32 crc kubenswrapper[4869]: I1001 15:07:32.748353 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0861897c-f37b-416a-bc83-7e72df955845-utilities\") pod \"redhat-operators-gxmfx\" (UID: \"0861897c-f37b-416a-bc83-7e72df955845\") " pod="openshift-marketplace/redhat-operators-gxmfx" Oct 01 15:07:32 crc kubenswrapper[4869]: I1001 15:07:32.850138 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0861897c-f37b-416a-bc83-7e72df955845-utilities\") pod \"redhat-operators-gxmfx\" (UID: \"0861897c-f37b-416a-bc83-7e72df955845\") " pod="openshift-marketplace/redhat-operators-gxmfx" Oct 01 15:07:32 crc kubenswrapper[4869]: I1001 15:07:32.850448 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n7pbm\" (UniqueName: \"kubernetes.io/projected/0861897c-f37b-416a-bc83-7e72df955845-kube-api-access-n7pbm\") pod \"redhat-operators-gxmfx\" (UID: \"0861897c-f37b-416a-bc83-7e72df955845\") " pod="openshift-marketplace/redhat-operators-gxmfx" Oct 01 15:07:32 crc kubenswrapper[4869]: I1001 15:07:32.850473 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0861897c-f37b-416a-bc83-7e72df955845-catalog-content\") pod \"redhat-operators-gxmfx\" (UID: \"0861897c-f37b-416a-bc83-7e72df955845\") " pod="openshift-marketplace/redhat-operators-gxmfx" Oct 01 15:07:32 crc kubenswrapper[4869]: I1001 15:07:32.850993 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0861897c-f37b-416a-bc83-7e72df955845-catalog-content\") pod \"redhat-operators-gxmfx\" (UID: \"0861897c-f37b-416a-bc83-7e72df955845\") " pod="openshift-marketplace/redhat-operators-gxmfx" Oct 01 15:07:32 crc kubenswrapper[4869]: I1001 15:07:32.851059 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0861897c-f37b-416a-bc83-7e72df955845-utilities\") pod \"redhat-operators-gxmfx\" (UID: \"0861897c-f37b-416a-bc83-7e72df955845\") " pod="openshift-marketplace/redhat-operators-gxmfx" Oct 01 15:07:32 crc kubenswrapper[4869]: I1001 15:07:32.874850 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n7pbm\" (UniqueName: \"kubernetes.io/projected/0861897c-f37b-416a-bc83-7e72df955845-kube-api-access-n7pbm\") pod \"redhat-operators-gxmfx\" (UID: 
\"0861897c-f37b-416a-bc83-7e72df955845\") " pod="openshift-marketplace/redhat-operators-gxmfx" Oct 01 15:07:32 crc kubenswrapper[4869]: I1001 15:07:32.926686 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-gxmfx" Oct 01 15:07:33 crc kubenswrapper[4869]: I1001 15:07:33.009627 4869 generic.go:334] "Generic (PLEG): container finished" podID="9bae054d-44c8-41e1-9383-128da7767e72" containerID="7e08be4c58f81592e8bca0ae481381faf5e736f9c25f2d3ef3e2a4df579f8c72" exitCode=0 Oct 01 15:07:33 crc kubenswrapper[4869]: I1001 15:07:33.009718 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-clqzs" event={"ID":"9bae054d-44c8-41e1-9383-128da7767e72","Type":"ContainerDied","Data":"7e08be4c58f81592e8bca0ae481381faf5e736f9c25f2d3ef3e2a4df579f8c72"} Oct 01 15:07:33 crc kubenswrapper[4869]: I1001 15:07:33.011914 4869 generic.go:334] "Generic (PLEG): container finished" podID="fddf94b6-3137-4043-bbfd-28ae6650fb5a" containerID="6da13ad1750c234d333b71cb38d43ba4b5c612be7480373bd8cf9b38d2fb4fa1" exitCode=0 Oct 01 15:07:33 crc kubenswrapper[4869]: I1001 15:07:33.011952 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-r6jsz" event={"ID":"fddf94b6-3137-4043-bbfd-28ae6650fb5a","Type":"ContainerDied","Data":"6da13ad1750c234d333b71cb38d43ba4b5c612be7480373bd8cf9b38d2fb4fa1"} Oct 01 15:07:33 crc kubenswrapper[4869]: I1001 15:07:33.098812 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-gxmfx"] Oct 01 15:07:33 crc kubenswrapper[4869]: I1001 15:07:33.125730 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-zqdck"] Oct 01 15:07:33 crc kubenswrapper[4869]: W1001 15:07:33.132591 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1ec94678_9587_4f84_b2ce_745d0321216b.slice/crio-4a2581357bcfa105d5942f79d6b2df264e831eb54905be3b12749d13e1f2e79b WatchSource:0}: Error finding container 4a2581357bcfa105d5942f79d6b2df264e831eb54905be3b12749d13e1f2e79b: Status 404 returned error can't find the container with id 4a2581357bcfa105d5942f79d6b2df264e831eb54905be3b12749d13e1f2e79b Oct 01 15:07:34 crc kubenswrapper[4869]: I1001 15:07:34.021296 4869 generic.go:334] "Generic (PLEG): container finished" podID="1ec94678-9587-4f84-b2ce-745d0321216b" containerID="5351a7a873b9d2ab0a43258c60d0dc35b166f98dedd0a0dbdd8633f786f04400" exitCode=0 Oct 01 15:07:34 crc kubenswrapper[4869]: I1001 15:07:34.021777 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zqdck" event={"ID":"1ec94678-9587-4f84-b2ce-745d0321216b","Type":"ContainerDied","Data":"5351a7a873b9d2ab0a43258c60d0dc35b166f98dedd0a0dbdd8633f786f04400"} Oct 01 15:07:34 crc kubenswrapper[4869]: I1001 15:07:34.021802 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zqdck" event={"ID":"1ec94678-9587-4f84-b2ce-745d0321216b","Type":"ContainerStarted","Data":"4a2581357bcfa105d5942f79d6b2df264e831eb54905be3b12749d13e1f2e79b"} Oct 01 15:07:34 crc kubenswrapper[4869]: I1001 15:07:34.027483 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-clqzs" event={"ID":"9bae054d-44c8-41e1-9383-128da7767e72","Type":"ContainerStarted","Data":"f663b223ffd2c0fdbb8b71ede13cb08ecc34ca35dbbc4e6dcd9ba3f3323a1163"} Oct 01 
15:07:34 crc kubenswrapper[4869]: I1001 15:07:34.030208 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-r6jsz" event={"ID":"fddf94b6-3137-4043-bbfd-28ae6650fb5a","Type":"ContainerStarted","Data":"ee266faccac44ec4083b108c3563d4fe990268f81fa9da85216b7c7d0a57459b"} Oct 01 15:07:34 crc kubenswrapper[4869]: I1001 15:07:34.032131 4869 generic.go:334] "Generic (PLEG): container finished" podID="0861897c-f37b-416a-bc83-7e72df955845" containerID="a6e80d5c9601c239e4f3f09eb993e3b5f21f021b97bd8ee74061753dc6651872" exitCode=0 Oct 01 15:07:34 crc kubenswrapper[4869]: I1001 15:07:34.032187 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gxmfx" event={"ID":"0861897c-f37b-416a-bc83-7e72df955845","Type":"ContainerDied","Data":"a6e80d5c9601c239e4f3f09eb993e3b5f21f021b97bd8ee74061753dc6651872"} Oct 01 15:07:34 crc kubenswrapper[4869]: I1001 15:07:34.032220 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gxmfx" event={"ID":"0861897c-f37b-416a-bc83-7e72df955845","Type":"ContainerStarted","Data":"cd2915ee2e41bba3d00154e21b865bb63bd1e1ff9309650435d93a0885d7b29d"} Oct 01 15:07:34 crc kubenswrapper[4869]: I1001 15:07:34.065642 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-clqzs" podStartSLOduration=1.412750096 podStartE2EDuration="4.065616872s" podCreationTimestamp="2025-10-01 15:07:30 +0000 UTC" firstStartedPulling="2025-10-01 15:07:30.990113475 +0000 UTC m=+160.136956601" lastFinishedPulling="2025-10-01 15:07:33.642980261 +0000 UTC m=+162.789823377" observedRunningTime="2025-10-01 15:07:34.060989769 +0000 UTC m=+163.207832925" watchObservedRunningTime="2025-10-01 15:07:34.065616872 +0000 UTC m=+163.212459998" Oct 01 15:07:34 crc kubenswrapper[4869]: I1001 15:07:34.092521 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-r6jsz" podStartSLOduration=2.637783444 podStartE2EDuration="5.092506004s" podCreationTimestamp="2025-10-01 15:07:29 +0000 UTC" firstStartedPulling="2025-10-01 15:07:30.986496339 +0000 UTC m=+160.133339455" lastFinishedPulling="2025-10-01 15:07:33.441218899 +0000 UTC m=+162.588062015" observedRunningTime="2025-10-01 15:07:34.090884991 +0000 UTC m=+163.237728127" watchObservedRunningTime="2025-10-01 15:07:34.092506004 +0000 UTC m=+163.239349120" Oct 01 15:07:37 crc kubenswrapper[4869]: I1001 15:07:37.047941 4869 generic.go:334] "Generic (PLEG): container finished" podID="0861897c-f37b-416a-bc83-7e72df955845" containerID="9cc41a569e1847ccd85cd0808202a104049ea59b261f399314d0b40679fdfe2f" exitCode=0 Oct 01 15:07:37 crc kubenswrapper[4869]: I1001 15:07:37.048429 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gxmfx" event={"ID":"0861897c-f37b-416a-bc83-7e72df955845","Type":"ContainerDied","Data":"9cc41a569e1847ccd85cd0808202a104049ea59b261f399314d0b40679fdfe2f"} Oct 01 15:07:37 crc kubenswrapper[4869]: I1001 15:07:37.054094 4869 generic.go:334] "Generic (PLEG): container finished" podID="1ec94678-9587-4f84-b2ce-745d0321216b" containerID="3f0073748d32dc2fb484117df88eeac94e562c04ee4fa4302278c2ccb6cddfbd" exitCode=0 Oct 01 15:07:37 crc kubenswrapper[4869]: I1001 15:07:37.054284 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zqdck" 
event={"ID":"1ec94678-9587-4f84-b2ce-745d0321216b","Type":"ContainerDied","Data":"3f0073748d32dc2fb484117df88eeac94e562c04ee4fa4302278c2ccb6cddfbd"} Oct 01 15:07:39 crc kubenswrapper[4869]: I1001 15:07:39.065808 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gxmfx" event={"ID":"0861897c-f37b-416a-bc83-7e72df955845","Type":"ContainerStarted","Data":"8b02d2e7efc22a7ca238dd01481c487226251046d26d0d2a6b86039b9d896945"} Oct 01 15:07:39 crc kubenswrapper[4869]: I1001 15:07:39.068035 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zqdck" event={"ID":"1ec94678-9587-4f84-b2ce-745d0321216b","Type":"ContainerStarted","Data":"39d0958fbc9a42b86bf6c15d06e15ae4aef658cd7ae38bca91a81bc43f53c1c9"} Oct 01 15:07:39 crc kubenswrapper[4869]: I1001 15:07:39.081800 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-gxmfx" podStartSLOduration=2.56141285 podStartE2EDuration="7.081784156s" podCreationTimestamp="2025-10-01 15:07:32 +0000 UTC" firstStartedPulling="2025-10-01 15:07:34.033225434 +0000 UTC m=+163.180068550" lastFinishedPulling="2025-10-01 15:07:38.55359674 +0000 UTC m=+167.700439856" observedRunningTime="2025-10-01 15:07:39.080598105 +0000 UTC m=+168.227441231" watchObservedRunningTime="2025-10-01 15:07:39.081784156 +0000 UTC m=+168.228627282" Oct 01 15:07:39 crc kubenswrapper[4869]: I1001 15:07:39.097930 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-zqdck" podStartSLOduration=2.629547504 podStartE2EDuration="7.097909993s" podCreationTimestamp="2025-10-01 15:07:32 +0000 UTC" firstStartedPulling="2025-10-01 15:07:34.023151907 +0000 UTC m=+163.169995023" lastFinishedPulling="2025-10-01 15:07:38.491514356 +0000 UTC m=+167.638357512" observedRunningTime="2025-10-01 15:07:39.097687877 +0000 UTC m=+168.244530993" watchObservedRunningTime="2025-10-01 15:07:39.097909993 +0000 UTC m=+168.244753119" Oct 01 15:07:40 crc kubenswrapper[4869]: I1001 15:07:40.348088 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-r6jsz" Oct 01 15:07:40 crc kubenswrapper[4869]: I1001 15:07:40.348159 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-r6jsz" Oct 01 15:07:40 crc kubenswrapper[4869]: I1001 15:07:40.406087 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-r6jsz" Oct 01 15:07:40 crc kubenswrapper[4869]: I1001 15:07:40.537475 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-clqzs" Oct 01 15:07:40 crc kubenswrapper[4869]: I1001 15:07:40.537528 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-clqzs" Oct 01 15:07:40 crc kubenswrapper[4869]: I1001 15:07:40.596376 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-clqzs" Oct 01 15:07:41 crc kubenswrapper[4869]: I1001 15:07:41.117312 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-r6jsz" Oct 01 15:07:41 crc kubenswrapper[4869]: I1001 15:07:41.120087 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-marketplace/community-operators-clqzs" Oct 01 15:07:42 crc kubenswrapper[4869]: I1001 15:07:42.734325 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-zqdck" Oct 01 15:07:42 crc kubenswrapper[4869]: I1001 15:07:42.734646 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-zqdck" Oct 01 15:07:42 crc kubenswrapper[4869]: I1001 15:07:42.788909 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-zqdck" Oct 01 15:07:42 crc kubenswrapper[4869]: I1001 15:07:42.926829 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-gxmfx" Oct 01 15:07:42 crc kubenswrapper[4869]: I1001 15:07:42.927202 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-gxmfx" Oct 01 15:07:43 crc kubenswrapper[4869]: I1001 15:07:43.130394 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-zqdck" Oct 01 15:07:43 crc kubenswrapper[4869]: I1001 15:07:43.354034 4869 patch_prober.go:28] interesting pod/machine-config-daemon-c86m8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 15:07:43 crc kubenswrapper[4869]: I1001 15:07:43.354107 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 15:07:43 crc kubenswrapper[4869]: I1001 15:07:43.988311 4869 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-gxmfx" podUID="0861897c-f37b-416a-bc83-7e72df955845" containerName="registry-server" probeResult="failure" output=< Oct 01 15:07:43 crc kubenswrapper[4869]: timeout: failed to connect service ":50051" within 1s Oct 01 15:07:43 crc kubenswrapper[4869]: > Oct 01 15:07:52 crc kubenswrapper[4869]: I1001 15:07:52.994576 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-gxmfx" Oct 01 15:07:53 crc kubenswrapper[4869]: I1001 15:07:53.067573 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-gxmfx" Oct 01 15:08:13 crc kubenswrapper[4869]: I1001 15:08:13.354192 4869 patch_prober.go:28] interesting pod/machine-config-daemon-c86m8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 15:08:13 crc kubenswrapper[4869]: I1001 15:08:13.354873 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 15:08:13 crc kubenswrapper[4869]: I1001 15:08:13.354946 4869 kubelet.go:2542] 
"SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" Oct 01 15:08:13 crc kubenswrapper[4869]: I1001 15:08:13.356047 4869 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"25d5b00d18bcf2d6a0845f99197fc189846fe0b873d6025b6799bfcb762624fd"} pod="openshift-machine-config-operator/machine-config-daemon-c86m8" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 01 15:08:13 crc kubenswrapper[4869]: I1001 15:08:13.356146 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" containerID="cri-o://25d5b00d18bcf2d6a0845f99197fc189846fe0b873d6025b6799bfcb762624fd" gracePeriod=600 Oct 01 15:08:14 crc kubenswrapper[4869]: I1001 15:08:14.278565 4869 generic.go:334] "Generic (PLEG): container finished" podID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerID="25d5b00d18bcf2d6a0845f99197fc189846fe0b873d6025b6799bfcb762624fd" exitCode=0 Oct 01 15:08:14 crc kubenswrapper[4869]: I1001 15:08:14.278671 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" event={"ID":"a4b64f7f-0b03-4f47-965b-9fde048b735c","Type":"ContainerDied","Data":"25d5b00d18bcf2d6a0845f99197fc189846fe0b873d6025b6799bfcb762624fd"} Oct 01 15:08:14 crc kubenswrapper[4869]: I1001 15:08:14.279121 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" event={"ID":"a4b64f7f-0b03-4f47-965b-9fde048b735c","Type":"ContainerStarted","Data":"af6f12677fcb9642c558dd68c19ae34abd92bffe6eaa669049660c21b5cb4867"} Oct 01 15:10:13 crc kubenswrapper[4869]: I1001 15:10:13.354176 4869 patch_prober.go:28] interesting pod/machine-config-daemon-c86m8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 15:10:13 crc kubenswrapper[4869]: I1001 15:10:13.354995 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 15:10:43 crc kubenswrapper[4869]: I1001 15:10:43.354164 4869 patch_prober.go:28] interesting pod/machine-config-daemon-c86m8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 15:10:43 crc kubenswrapper[4869]: I1001 15:10:43.354818 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 15:11:10 crc kubenswrapper[4869]: I1001 15:11:10.096978 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-hmdf8"] Oct 
01 15:11:10 crc kubenswrapper[4869]: I1001 15:11:10.099346 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-hmdf8" Oct 01 15:11:10 crc kubenswrapper[4869]: I1001 15:11:10.121194 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-hmdf8"] Oct 01 15:11:10 crc kubenswrapper[4869]: I1001 15:11:10.218356 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/59ca477e-376d-4085-8c6f-6e238d9eda9b-ca-trust-extracted\") pod \"image-registry-66df7c8f76-hmdf8\" (UID: \"59ca477e-376d-4085-8c6f-6e238d9eda9b\") " pod="openshift-image-registry/image-registry-66df7c8f76-hmdf8" Oct 01 15:11:10 crc kubenswrapper[4869]: I1001 15:11:10.218405 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/59ca477e-376d-4085-8c6f-6e238d9eda9b-registry-certificates\") pod \"image-registry-66df7c8f76-hmdf8\" (UID: \"59ca477e-376d-4085-8c6f-6e238d9eda9b\") " pod="openshift-image-registry/image-registry-66df7c8f76-hmdf8" Oct 01 15:11:10 crc kubenswrapper[4869]: I1001 15:11:10.218448 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/59ca477e-376d-4085-8c6f-6e238d9eda9b-registry-tls\") pod \"image-registry-66df7c8f76-hmdf8\" (UID: \"59ca477e-376d-4085-8c6f-6e238d9eda9b\") " pod="openshift-image-registry/image-registry-66df7c8f76-hmdf8" Oct 01 15:11:10 crc kubenswrapper[4869]: I1001 15:11:10.218655 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-hmdf8\" (UID: \"59ca477e-376d-4085-8c6f-6e238d9eda9b\") " pod="openshift-image-registry/image-registry-66df7c8f76-hmdf8" Oct 01 15:11:10 crc kubenswrapper[4869]: I1001 15:11:10.218723 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/59ca477e-376d-4085-8c6f-6e238d9eda9b-trusted-ca\") pod \"image-registry-66df7c8f76-hmdf8\" (UID: \"59ca477e-376d-4085-8c6f-6e238d9eda9b\") " pod="openshift-image-registry/image-registry-66df7c8f76-hmdf8" Oct 01 15:11:10 crc kubenswrapper[4869]: I1001 15:11:10.218770 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m8vwg\" (UniqueName: \"kubernetes.io/projected/59ca477e-376d-4085-8c6f-6e238d9eda9b-kube-api-access-m8vwg\") pod \"image-registry-66df7c8f76-hmdf8\" (UID: \"59ca477e-376d-4085-8c6f-6e238d9eda9b\") " pod="openshift-image-registry/image-registry-66df7c8f76-hmdf8" Oct 01 15:11:10 crc kubenswrapper[4869]: I1001 15:11:10.218807 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/59ca477e-376d-4085-8c6f-6e238d9eda9b-installation-pull-secrets\") pod \"image-registry-66df7c8f76-hmdf8\" (UID: \"59ca477e-376d-4085-8c6f-6e238d9eda9b\") " pod="openshift-image-registry/image-registry-66df7c8f76-hmdf8" Oct 01 15:11:10 crc kubenswrapper[4869]: I1001 15:11:10.218824 4869 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/59ca477e-376d-4085-8c6f-6e238d9eda9b-bound-sa-token\") pod \"image-registry-66df7c8f76-hmdf8\" (UID: \"59ca477e-376d-4085-8c6f-6e238d9eda9b\") " pod="openshift-image-registry/image-registry-66df7c8f76-hmdf8" Oct 01 15:11:10 crc kubenswrapper[4869]: I1001 15:11:10.254413 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-hmdf8\" (UID: \"59ca477e-376d-4085-8c6f-6e238d9eda9b\") " pod="openshift-image-registry/image-registry-66df7c8f76-hmdf8" Oct 01 15:11:10 crc kubenswrapper[4869]: I1001 15:11:10.320045 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/59ca477e-376d-4085-8c6f-6e238d9eda9b-trusted-ca\") pod \"image-registry-66df7c8f76-hmdf8\" (UID: \"59ca477e-376d-4085-8c6f-6e238d9eda9b\") " pod="openshift-image-registry/image-registry-66df7c8f76-hmdf8" Oct 01 15:11:10 crc kubenswrapper[4869]: I1001 15:11:10.320131 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m8vwg\" (UniqueName: \"kubernetes.io/projected/59ca477e-376d-4085-8c6f-6e238d9eda9b-kube-api-access-m8vwg\") pod \"image-registry-66df7c8f76-hmdf8\" (UID: \"59ca477e-376d-4085-8c6f-6e238d9eda9b\") " pod="openshift-image-registry/image-registry-66df7c8f76-hmdf8" Oct 01 15:11:10 crc kubenswrapper[4869]: I1001 15:11:10.320178 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/59ca477e-376d-4085-8c6f-6e238d9eda9b-installation-pull-secrets\") pod \"image-registry-66df7c8f76-hmdf8\" (UID: \"59ca477e-376d-4085-8c6f-6e238d9eda9b\") " pod="openshift-image-registry/image-registry-66df7c8f76-hmdf8" Oct 01 15:11:10 crc kubenswrapper[4869]: I1001 15:11:10.320213 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/59ca477e-376d-4085-8c6f-6e238d9eda9b-bound-sa-token\") pod \"image-registry-66df7c8f76-hmdf8\" (UID: \"59ca477e-376d-4085-8c6f-6e238d9eda9b\") " pod="openshift-image-registry/image-registry-66df7c8f76-hmdf8" Oct 01 15:11:10 crc kubenswrapper[4869]: I1001 15:11:10.320288 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/59ca477e-376d-4085-8c6f-6e238d9eda9b-ca-trust-extracted\") pod \"image-registry-66df7c8f76-hmdf8\" (UID: \"59ca477e-376d-4085-8c6f-6e238d9eda9b\") " pod="openshift-image-registry/image-registry-66df7c8f76-hmdf8" Oct 01 15:11:10 crc kubenswrapper[4869]: I1001 15:11:10.320332 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/59ca477e-376d-4085-8c6f-6e238d9eda9b-registry-certificates\") pod \"image-registry-66df7c8f76-hmdf8\" (UID: \"59ca477e-376d-4085-8c6f-6e238d9eda9b\") " pod="openshift-image-registry/image-registry-66df7c8f76-hmdf8" Oct 01 15:11:10 crc kubenswrapper[4869]: I1001 15:11:10.320406 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/59ca477e-376d-4085-8c6f-6e238d9eda9b-registry-tls\") pod 
\"image-registry-66df7c8f76-hmdf8\" (UID: \"59ca477e-376d-4085-8c6f-6e238d9eda9b\") " pod="openshift-image-registry/image-registry-66df7c8f76-hmdf8" Oct 01 15:11:10 crc kubenswrapper[4869]: I1001 15:11:10.321190 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/59ca477e-376d-4085-8c6f-6e238d9eda9b-ca-trust-extracted\") pod \"image-registry-66df7c8f76-hmdf8\" (UID: \"59ca477e-376d-4085-8c6f-6e238d9eda9b\") " pod="openshift-image-registry/image-registry-66df7c8f76-hmdf8" Oct 01 15:11:10 crc kubenswrapper[4869]: I1001 15:11:10.322114 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/59ca477e-376d-4085-8c6f-6e238d9eda9b-trusted-ca\") pod \"image-registry-66df7c8f76-hmdf8\" (UID: \"59ca477e-376d-4085-8c6f-6e238d9eda9b\") " pod="openshift-image-registry/image-registry-66df7c8f76-hmdf8" Oct 01 15:11:10 crc kubenswrapper[4869]: I1001 15:11:10.322349 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/59ca477e-376d-4085-8c6f-6e238d9eda9b-registry-certificates\") pod \"image-registry-66df7c8f76-hmdf8\" (UID: \"59ca477e-376d-4085-8c6f-6e238d9eda9b\") " pod="openshift-image-registry/image-registry-66df7c8f76-hmdf8" Oct 01 15:11:10 crc kubenswrapper[4869]: I1001 15:11:10.327374 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/59ca477e-376d-4085-8c6f-6e238d9eda9b-installation-pull-secrets\") pod \"image-registry-66df7c8f76-hmdf8\" (UID: \"59ca477e-376d-4085-8c6f-6e238d9eda9b\") " pod="openshift-image-registry/image-registry-66df7c8f76-hmdf8" Oct 01 15:11:10 crc kubenswrapper[4869]: I1001 15:11:10.327485 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/59ca477e-376d-4085-8c6f-6e238d9eda9b-registry-tls\") pod \"image-registry-66df7c8f76-hmdf8\" (UID: \"59ca477e-376d-4085-8c6f-6e238d9eda9b\") " pod="openshift-image-registry/image-registry-66df7c8f76-hmdf8" Oct 01 15:11:10 crc kubenswrapper[4869]: I1001 15:11:10.340320 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/59ca477e-376d-4085-8c6f-6e238d9eda9b-bound-sa-token\") pod \"image-registry-66df7c8f76-hmdf8\" (UID: \"59ca477e-376d-4085-8c6f-6e238d9eda9b\") " pod="openshift-image-registry/image-registry-66df7c8f76-hmdf8" Oct 01 15:11:10 crc kubenswrapper[4869]: I1001 15:11:10.343043 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m8vwg\" (UniqueName: \"kubernetes.io/projected/59ca477e-376d-4085-8c6f-6e238d9eda9b-kube-api-access-m8vwg\") pod \"image-registry-66df7c8f76-hmdf8\" (UID: \"59ca477e-376d-4085-8c6f-6e238d9eda9b\") " pod="openshift-image-registry/image-registry-66df7c8f76-hmdf8" Oct 01 15:11:10 crc kubenswrapper[4869]: I1001 15:11:10.421221 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-hmdf8" Oct 01 15:11:10 crc kubenswrapper[4869]: I1001 15:11:10.617635 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-hmdf8"] Oct 01 15:11:11 crc kubenswrapper[4869]: I1001 15:11:11.435991 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-hmdf8" event={"ID":"59ca477e-376d-4085-8c6f-6e238d9eda9b","Type":"ContainerStarted","Data":"b65bc8e25ec4ec929fa738b9341aa2c6f2923c8e8204763a36892c1a142f8ebd"} Oct 01 15:11:11 crc kubenswrapper[4869]: I1001 15:11:11.436345 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-hmdf8" Oct 01 15:11:11 crc kubenswrapper[4869]: I1001 15:11:11.436369 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-hmdf8" event={"ID":"59ca477e-376d-4085-8c6f-6e238d9eda9b","Type":"ContainerStarted","Data":"7bdeeb9f418b0c4573281d787769e699650f5f8810ebb26451f2e23fa0df900a"} Oct 01 15:11:13 crc kubenswrapper[4869]: I1001 15:11:13.355196 4869 patch_prober.go:28] interesting pod/machine-config-daemon-c86m8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 15:11:13 crc kubenswrapper[4869]: I1001 15:11:13.356477 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 15:11:13 crc kubenswrapper[4869]: I1001 15:11:13.356535 4869 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" Oct 01 15:11:13 crc kubenswrapper[4869]: I1001 15:11:13.357169 4869 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"af6f12677fcb9642c558dd68c19ae34abd92bffe6eaa669049660c21b5cb4867"} pod="openshift-machine-config-operator/machine-config-daemon-c86m8" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 01 15:11:13 crc kubenswrapper[4869]: I1001 15:11:13.357226 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" containerID="cri-o://af6f12677fcb9642c558dd68c19ae34abd92bffe6eaa669049660c21b5cb4867" gracePeriod=600 Oct 01 15:11:14 crc kubenswrapper[4869]: I1001 15:11:14.457860 4869 generic.go:334] "Generic (PLEG): container finished" podID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerID="af6f12677fcb9642c558dd68c19ae34abd92bffe6eaa669049660c21b5cb4867" exitCode=0 Oct 01 15:11:14 crc kubenswrapper[4869]: I1001 15:11:14.458021 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" event={"ID":"a4b64f7f-0b03-4f47-965b-9fde048b735c","Type":"ContainerDied","Data":"af6f12677fcb9642c558dd68c19ae34abd92bffe6eaa669049660c21b5cb4867"} Oct 01 15:11:14 crc kubenswrapper[4869]: I1001 
15:11:14.458355 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" event={"ID":"a4b64f7f-0b03-4f47-965b-9fde048b735c","Type":"ContainerStarted","Data":"d389e7566576d3e629333a96c6509eba4ec6eb584120d194f797cef044db86f8"} Oct 01 15:11:14 crc kubenswrapper[4869]: I1001 15:11:14.458407 4869 scope.go:117] "RemoveContainer" containerID="25d5b00d18bcf2d6a0845f99197fc189846fe0b873d6025b6799bfcb762624fd" Oct 01 15:11:14 crc kubenswrapper[4869]: I1001 15:11:14.479300 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-hmdf8" podStartSLOduration=4.479281958 podStartE2EDuration="4.479281958s" podCreationTimestamp="2025-10-01 15:11:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:11:11.455316915 +0000 UTC m=+380.602160071" watchObservedRunningTime="2025-10-01 15:11:14.479281958 +0000 UTC m=+383.626125074" Oct 01 15:11:30 crc kubenswrapper[4869]: I1001 15:11:30.428668 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-hmdf8" Oct 01 15:11:30 crc kubenswrapper[4869]: I1001 15:11:30.475399 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-4zhjs"] Oct 01 15:11:55 crc kubenswrapper[4869]: I1001 15:11:55.509218 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-4zhjs" podUID="79b5f958-f252-4703-b785-05b0d01a6e72" containerName="registry" containerID="cri-o://fcc73b0228a2a59b4c6372461cc50cb0cb9d077f35f62f827a155eab5b60ed3b" gracePeriod=30 Oct 01 15:11:55 crc kubenswrapper[4869]: I1001 15:11:55.730411 4869 generic.go:334] "Generic (PLEG): container finished" podID="79b5f958-f252-4703-b785-05b0d01a6e72" containerID="fcc73b0228a2a59b4c6372461cc50cb0cb9d077f35f62f827a155eab5b60ed3b" exitCode=0 Oct 01 15:11:55 crc kubenswrapper[4869]: I1001 15:11:55.730660 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-4zhjs" event={"ID":"79b5f958-f252-4703-b785-05b0d01a6e72","Type":"ContainerDied","Data":"fcc73b0228a2a59b4c6372461cc50cb0cb9d077f35f62f827a155eab5b60ed3b"} Oct 01 15:11:55 crc kubenswrapper[4869]: I1001 15:11:55.930937 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-4zhjs" Oct 01 15:11:56 crc kubenswrapper[4869]: I1001 15:11:56.101438 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/79b5f958-f252-4703-b785-05b0d01a6e72-ca-trust-extracted\") pod \"79b5f958-f252-4703-b785-05b0d01a6e72\" (UID: \"79b5f958-f252-4703-b785-05b0d01a6e72\") " Oct 01 15:11:56 crc kubenswrapper[4869]: I1001 15:11:56.101566 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/79b5f958-f252-4703-b785-05b0d01a6e72-bound-sa-token\") pod \"79b5f958-f252-4703-b785-05b0d01a6e72\" (UID: \"79b5f958-f252-4703-b785-05b0d01a6e72\") " Oct 01 15:11:56 crc kubenswrapper[4869]: I1001 15:11:56.101600 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f5ldd\" (UniqueName: \"kubernetes.io/projected/79b5f958-f252-4703-b785-05b0d01a6e72-kube-api-access-f5ldd\") pod \"79b5f958-f252-4703-b785-05b0d01a6e72\" (UID: \"79b5f958-f252-4703-b785-05b0d01a6e72\") " Oct 01 15:11:56 crc kubenswrapper[4869]: I1001 15:11:56.101633 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/79b5f958-f252-4703-b785-05b0d01a6e72-trusted-ca\") pod \"79b5f958-f252-4703-b785-05b0d01a6e72\" (UID: \"79b5f958-f252-4703-b785-05b0d01a6e72\") " Oct 01 15:11:56 crc kubenswrapper[4869]: I1001 15:11:56.101681 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/79b5f958-f252-4703-b785-05b0d01a6e72-registry-tls\") pod \"79b5f958-f252-4703-b785-05b0d01a6e72\" (UID: \"79b5f958-f252-4703-b785-05b0d01a6e72\") " Oct 01 15:11:56 crc kubenswrapper[4869]: I1001 15:11:56.101720 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/79b5f958-f252-4703-b785-05b0d01a6e72-installation-pull-secrets\") pod \"79b5f958-f252-4703-b785-05b0d01a6e72\" (UID: \"79b5f958-f252-4703-b785-05b0d01a6e72\") " Oct 01 15:11:56 crc kubenswrapper[4869]: I1001 15:11:56.101769 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/79b5f958-f252-4703-b785-05b0d01a6e72-registry-certificates\") pod \"79b5f958-f252-4703-b785-05b0d01a6e72\" (UID: \"79b5f958-f252-4703-b785-05b0d01a6e72\") " Oct 01 15:11:56 crc kubenswrapper[4869]: I1001 15:11:56.101964 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"79b5f958-f252-4703-b785-05b0d01a6e72\" (UID: \"79b5f958-f252-4703-b785-05b0d01a6e72\") " Oct 01 15:11:56 crc kubenswrapper[4869]: I1001 15:11:56.103510 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/79b5f958-f252-4703-b785-05b0d01a6e72-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "79b5f958-f252-4703-b785-05b0d01a6e72" (UID: "79b5f958-f252-4703-b785-05b0d01a6e72"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:11:56 crc kubenswrapper[4869]: I1001 15:11:56.103604 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/79b5f958-f252-4703-b785-05b0d01a6e72-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "79b5f958-f252-4703-b785-05b0d01a6e72" (UID: "79b5f958-f252-4703-b785-05b0d01a6e72"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:11:56 crc kubenswrapper[4869]: I1001 15:11:56.111975 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/79b5f958-f252-4703-b785-05b0d01a6e72-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "79b5f958-f252-4703-b785-05b0d01a6e72" (UID: "79b5f958-f252-4703-b785-05b0d01a6e72"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:11:56 crc kubenswrapper[4869]: I1001 15:11:56.112595 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/79b5f958-f252-4703-b785-05b0d01a6e72-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "79b5f958-f252-4703-b785-05b0d01a6e72" (UID: "79b5f958-f252-4703-b785-05b0d01a6e72"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:11:56 crc kubenswrapper[4869]: I1001 15:11:56.113127 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/79b5f958-f252-4703-b785-05b0d01a6e72-kube-api-access-f5ldd" (OuterVolumeSpecName: "kube-api-access-f5ldd") pod "79b5f958-f252-4703-b785-05b0d01a6e72" (UID: "79b5f958-f252-4703-b785-05b0d01a6e72"). InnerVolumeSpecName "kube-api-access-f5ldd". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:11:56 crc kubenswrapper[4869]: I1001 15:11:56.114344 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/79b5f958-f252-4703-b785-05b0d01a6e72-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "79b5f958-f252-4703-b785-05b0d01a6e72" (UID: "79b5f958-f252-4703-b785-05b0d01a6e72"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:11:56 crc kubenswrapper[4869]: I1001 15:11:56.119457 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "79b5f958-f252-4703-b785-05b0d01a6e72" (UID: "79b5f958-f252-4703-b785-05b0d01a6e72"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Oct 01 15:11:56 crc kubenswrapper[4869]: I1001 15:11:56.121982 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/79b5f958-f252-4703-b785-05b0d01a6e72-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "79b5f958-f252-4703-b785-05b0d01a6e72" (UID: "79b5f958-f252-4703-b785-05b0d01a6e72"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 15:11:56 crc kubenswrapper[4869]: I1001 15:11:56.204054 4869 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/79b5f958-f252-4703-b785-05b0d01a6e72-registry-tls\") on node \"crc\" DevicePath \"\"" Oct 01 15:11:56 crc kubenswrapper[4869]: I1001 15:11:56.204124 4869 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/79b5f958-f252-4703-b785-05b0d01a6e72-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Oct 01 15:11:56 crc kubenswrapper[4869]: I1001 15:11:56.204149 4869 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/79b5f958-f252-4703-b785-05b0d01a6e72-registry-certificates\") on node \"crc\" DevicePath \"\"" Oct 01 15:11:56 crc kubenswrapper[4869]: I1001 15:11:56.204170 4869 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/79b5f958-f252-4703-b785-05b0d01a6e72-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Oct 01 15:11:56 crc kubenswrapper[4869]: I1001 15:11:56.204189 4869 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/79b5f958-f252-4703-b785-05b0d01a6e72-bound-sa-token\") on node \"crc\" DevicePath \"\"" Oct 01 15:11:56 crc kubenswrapper[4869]: I1001 15:11:56.204207 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f5ldd\" (UniqueName: \"kubernetes.io/projected/79b5f958-f252-4703-b785-05b0d01a6e72-kube-api-access-f5ldd\") on node \"crc\" DevicePath \"\"" Oct 01 15:11:56 crc kubenswrapper[4869]: I1001 15:11:56.204223 4869 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/79b5f958-f252-4703-b785-05b0d01a6e72-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 01 15:11:56 crc kubenswrapper[4869]: I1001 15:11:56.740947 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-4zhjs" event={"ID":"79b5f958-f252-4703-b785-05b0d01a6e72","Type":"ContainerDied","Data":"fdafdb3adfe9df9cf41c835d1421b9d5508d4e91abdac9bc1f398d9683e02490"} Oct 01 15:11:56 crc kubenswrapper[4869]: I1001 15:11:56.742106 4869 scope.go:117] "RemoveContainer" containerID="fcc73b0228a2a59b4c6372461cc50cb0cb9d077f35f62f827a155eab5b60ed3b" Oct 01 15:11:56 crc kubenswrapper[4869]: I1001 15:11:56.741020 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-4zhjs" Oct 01 15:11:56 crc kubenswrapper[4869]: I1001 15:11:56.777780 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-4zhjs"] Oct 01 15:11:56 crc kubenswrapper[4869]: I1001 15:11:56.780395 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-4zhjs"] Oct 01 15:11:57 crc kubenswrapper[4869]: I1001 15:11:57.593623 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="79b5f958-f252-4703-b785-05b0d01a6e72" path="/var/lib/kubelet/pods/79b5f958-f252-4703-b785-05b0d01a6e72/volumes" Oct 01 15:12:59 crc kubenswrapper[4869]: I1001 15:12:59.155689 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-zjw2v"] Oct 01 15:12:59 crc kubenswrapper[4869]: E1001 15:12:59.156431 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="79b5f958-f252-4703-b785-05b0d01a6e72" containerName="registry" Oct 01 15:12:59 crc kubenswrapper[4869]: I1001 15:12:59.156446 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="79b5f958-f252-4703-b785-05b0d01a6e72" containerName="registry" Oct 01 15:12:59 crc kubenswrapper[4869]: I1001 15:12:59.156539 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="79b5f958-f252-4703-b785-05b0d01a6e72" containerName="registry" Oct 01 15:12:59 crc kubenswrapper[4869]: I1001 15:12:59.156943 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-zjw2v" Oct 01 15:12:59 crc kubenswrapper[4869]: I1001 15:12:59.159797 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Oct 01 15:12:59 crc kubenswrapper[4869]: I1001 15:12:59.160523 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Oct 01 15:12:59 crc kubenswrapper[4869]: I1001 15:12:59.161906 4869 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-6lq6q" Oct 01 15:12:59 crc kubenswrapper[4869]: I1001 15:12:59.178208 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-zjw2v"] Oct 01 15:12:59 crc kubenswrapper[4869]: I1001 15:12:59.194439 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tgcmm\" (UniqueName: \"kubernetes.io/projected/84af5e11-443d-4eae-b1cb-70397019f8f5-kube-api-access-tgcmm\") pod \"cert-manager-cainjector-7f985d654d-zjw2v\" (UID: \"84af5e11-443d-4eae-b1cb-70397019f8f5\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-zjw2v" Oct 01 15:12:59 crc kubenswrapper[4869]: I1001 15:12:59.207375 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-5b446d88c5-s86t5"] Oct 01 15:12:59 crc kubenswrapper[4869]: I1001 15:12:59.208326 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-s86t5" Oct 01 15:12:59 crc kubenswrapper[4869]: I1001 15:12:59.216104 4869 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-r9bxw" Oct 01 15:12:59 crc kubenswrapper[4869]: I1001 15:12:59.227295 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-s86t5"] Oct 01 15:12:59 crc kubenswrapper[4869]: I1001 15:12:59.237341 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-jpszr"] Oct 01 15:12:59 crc kubenswrapper[4869]: I1001 15:12:59.238009 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-jpszr" Oct 01 15:12:59 crc kubenswrapper[4869]: I1001 15:12:59.247627 4869 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-5jqgq" Oct 01 15:12:59 crc kubenswrapper[4869]: I1001 15:12:59.256701 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-jpszr"] Oct 01 15:12:59 crc kubenswrapper[4869]: I1001 15:12:59.297423 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4hph2\" (UniqueName: \"kubernetes.io/projected/ccc7b391-9817-440d-a687-496243444ae9-kube-api-access-4hph2\") pod \"cert-manager-webhook-5655c58dd6-jpszr\" (UID: \"ccc7b391-9817-440d-a687-496243444ae9\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-jpszr" Oct 01 15:12:59 crc kubenswrapper[4869]: I1001 15:12:59.297551 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tgcmm\" (UniqueName: \"kubernetes.io/projected/84af5e11-443d-4eae-b1cb-70397019f8f5-kube-api-access-tgcmm\") pod \"cert-manager-cainjector-7f985d654d-zjw2v\" (UID: \"84af5e11-443d-4eae-b1cb-70397019f8f5\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-zjw2v" Oct 01 15:12:59 crc kubenswrapper[4869]: I1001 15:12:59.297612 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jkvrj\" (UniqueName: \"kubernetes.io/projected/d8389967-5803-4508-8abf-572bb2024d84-kube-api-access-jkvrj\") pod \"cert-manager-5b446d88c5-s86t5\" (UID: \"d8389967-5803-4508-8abf-572bb2024d84\") " pod="cert-manager/cert-manager-5b446d88c5-s86t5" Oct 01 15:12:59 crc kubenswrapper[4869]: I1001 15:12:59.327033 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tgcmm\" (UniqueName: \"kubernetes.io/projected/84af5e11-443d-4eae-b1cb-70397019f8f5-kube-api-access-tgcmm\") pod \"cert-manager-cainjector-7f985d654d-zjw2v\" (UID: \"84af5e11-443d-4eae-b1cb-70397019f8f5\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-zjw2v" Oct 01 15:12:59 crc kubenswrapper[4869]: I1001 15:12:59.399016 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jkvrj\" (UniqueName: \"kubernetes.io/projected/d8389967-5803-4508-8abf-572bb2024d84-kube-api-access-jkvrj\") pod \"cert-manager-5b446d88c5-s86t5\" (UID: \"d8389967-5803-4508-8abf-572bb2024d84\") " pod="cert-manager/cert-manager-5b446d88c5-s86t5" Oct 01 15:12:59 crc kubenswrapper[4869]: I1001 15:12:59.399121 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4hph2\" (UniqueName: \"kubernetes.io/projected/ccc7b391-9817-440d-a687-496243444ae9-kube-api-access-4hph2\") pod 
\"cert-manager-webhook-5655c58dd6-jpszr\" (UID: \"ccc7b391-9817-440d-a687-496243444ae9\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-jpszr" Oct 01 15:12:59 crc kubenswrapper[4869]: I1001 15:12:59.415747 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jkvrj\" (UniqueName: \"kubernetes.io/projected/d8389967-5803-4508-8abf-572bb2024d84-kube-api-access-jkvrj\") pod \"cert-manager-5b446d88c5-s86t5\" (UID: \"d8389967-5803-4508-8abf-572bb2024d84\") " pod="cert-manager/cert-manager-5b446d88c5-s86t5" Oct 01 15:12:59 crc kubenswrapper[4869]: I1001 15:12:59.418047 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4hph2\" (UniqueName: \"kubernetes.io/projected/ccc7b391-9817-440d-a687-496243444ae9-kube-api-access-4hph2\") pod \"cert-manager-webhook-5655c58dd6-jpszr\" (UID: \"ccc7b391-9817-440d-a687-496243444ae9\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-jpszr" Oct 01 15:12:59 crc kubenswrapper[4869]: I1001 15:12:59.481572 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-zjw2v" Oct 01 15:12:59 crc kubenswrapper[4869]: I1001 15:12:59.525822 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-s86t5" Oct 01 15:12:59 crc kubenswrapper[4869]: I1001 15:12:59.564766 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-jpszr" Oct 01 15:12:59 crc kubenswrapper[4869]: I1001 15:12:59.701057 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-zjw2v"] Oct 01 15:12:59 crc kubenswrapper[4869]: W1001 15:12:59.716747 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod84af5e11_443d_4eae_b1cb_70397019f8f5.slice/crio-9761bf3364ae24a36559977b24ba40b01f776c3fb61a339fb0d29bd78a51459e WatchSource:0}: Error finding container 9761bf3364ae24a36559977b24ba40b01f776c3fb61a339fb0d29bd78a51459e: Status 404 returned error can't find the container with id 9761bf3364ae24a36559977b24ba40b01f776c3fb61a339fb0d29bd78a51459e Oct 01 15:12:59 crc kubenswrapper[4869]: I1001 15:12:59.718984 4869 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 01 15:12:59 crc kubenswrapper[4869]: I1001 15:12:59.806492 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-jpszr"] Oct 01 15:12:59 crc kubenswrapper[4869]: W1001 15:12:59.809419 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podccc7b391_9817_440d_a687_496243444ae9.slice/crio-e955f87a013442500d5043c734b4f08222cafbdb2fccf92afdd367036b38c9f9 WatchSource:0}: Error finding container e955f87a013442500d5043c734b4f08222cafbdb2fccf92afdd367036b38c9f9: Status 404 returned error can't find the container with id e955f87a013442500d5043c734b4f08222cafbdb2fccf92afdd367036b38c9f9 Oct 01 15:12:59 crc kubenswrapper[4869]: I1001 15:12:59.967387 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-s86t5"] Oct 01 15:12:59 crc kubenswrapper[4869]: W1001 15:12:59.973700 4869 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd8389967_5803_4508_8abf_572bb2024d84.slice/crio-010a1440f2163b8a445648a6645120f83f30df6967731b659236a8bc5c14911e WatchSource:0}: Error finding container 010a1440f2163b8a445648a6645120f83f30df6967731b659236a8bc5c14911e: Status 404 returned error can't find the container with id 010a1440f2163b8a445648a6645120f83f30df6967731b659236a8bc5c14911e Oct 01 15:13:00 crc kubenswrapper[4869]: I1001 15:13:00.139931 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-zjw2v" event={"ID":"84af5e11-443d-4eae-b1cb-70397019f8f5","Type":"ContainerStarted","Data":"9761bf3364ae24a36559977b24ba40b01f776c3fb61a339fb0d29bd78a51459e"} Oct 01 15:13:00 crc kubenswrapper[4869]: I1001 15:13:00.141287 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-s86t5" event={"ID":"d8389967-5803-4508-8abf-572bb2024d84","Type":"ContainerStarted","Data":"010a1440f2163b8a445648a6645120f83f30df6967731b659236a8bc5c14911e"} Oct 01 15:13:00 crc kubenswrapper[4869]: I1001 15:13:00.142497 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-jpszr" event={"ID":"ccc7b391-9817-440d-a687-496243444ae9","Type":"ContainerStarted","Data":"e955f87a013442500d5043c734b4f08222cafbdb2fccf92afdd367036b38c9f9"} Oct 01 15:13:04 crc kubenswrapper[4869]: I1001 15:13:04.189828 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-zjw2v" event={"ID":"84af5e11-443d-4eae-b1cb-70397019f8f5","Type":"ContainerStarted","Data":"b360f278cab51c649f43b924757c030bb6fb861c167ee10d4d0b0821f3b66562"} Oct 01 15:13:04 crc kubenswrapper[4869]: I1001 15:13:04.191409 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-s86t5" event={"ID":"d8389967-5803-4508-8abf-572bb2024d84","Type":"ContainerStarted","Data":"73a9f5e40d522f591fff646b9f64f12b51e0008835994da187c1cd1741b59d5b"} Oct 01 15:13:04 crc kubenswrapper[4869]: I1001 15:13:04.192704 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-jpszr" event={"ID":"ccc7b391-9817-440d-a687-496243444ae9","Type":"ContainerStarted","Data":"958f621db2bd14942c295b58cce89f4b2fcadf0f9125ffd938dac784f47c842c"} Oct 01 15:13:04 crc kubenswrapper[4869]: I1001 15:13:04.192891 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-5655c58dd6-jpszr" Oct 01 15:13:04 crc kubenswrapper[4869]: I1001 15:13:04.202204 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-7f985d654d-zjw2v" podStartSLOduration=1.697064502 podStartE2EDuration="5.202188421s" podCreationTimestamp="2025-10-01 15:12:59 +0000 UTC" firstStartedPulling="2025-10-01 15:12:59.71873221 +0000 UTC m=+488.865575326" lastFinishedPulling="2025-10-01 15:13:03.223856119 +0000 UTC m=+492.370699245" observedRunningTime="2025-10-01 15:13:04.201067533 +0000 UTC m=+493.347910649" watchObservedRunningTime="2025-10-01 15:13:04.202188421 +0000 UTC m=+493.349031537" Oct 01 15:13:04 crc kubenswrapper[4869]: I1001 15:13:04.242241 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-5b446d88c5-s86t5" podStartSLOduration=1.935755149 podStartE2EDuration="5.242220524s" podCreationTimestamp="2025-10-01 15:12:59 +0000 UTC" firstStartedPulling="2025-10-01 15:12:59.976230322 +0000 
UTC m=+489.123073448" lastFinishedPulling="2025-10-01 15:13:03.282695697 +0000 UTC m=+492.429538823" observedRunningTime="2025-10-01 15:13:04.21402677 +0000 UTC m=+493.360869886" watchObservedRunningTime="2025-10-01 15:13:04.242220524 +0000 UTC m=+493.389063660" Oct 01 15:13:04 crc kubenswrapper[4869]: I1001 15:13:04.243134 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-5655c58dd6-jpszr" podStartSLOduration=1.75908617 podStartE2EDuration="5.243127306s" podCreationTimestamp="2025-10-01 15:12:59 +0000 UTC" firstStartedPulling="2025-10-01 15:12:59.811212468 +0000 UTC m=+488.958055584" lastFinishedPulling="2025-10-01 15:13:03.295253594 +0000 UTC m=+492.442096720" observedRunningTime="2025-10-01 15:13:04.238997202 +0000 UTC m=+493.385840318" watchObservedRunningTime="2025-10-01 15:13:04.243127306 +0000 UTC m=+493.389970432" Oct 01 15:13:09 crc kubenswrapper[4869]: I1001 15:13:09.569121 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-5655c58dd6-jpszr" Oct 01 15:13:09 crc kubenswrapper[4869]: I1001 15:13:09.891978 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-27gqg"] Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.230071 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-27gqg" podUID="ebbefc55-bef9-4a03-a065-321bff3a75b4" containerName="ovn-controller" containerID="cri-o://ce48fdbad0049809bac4c2afd94d87133b935841c6a3805f8eacc26dd8527c62" gracePeriod=30 Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.230110 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-27gqg" podUID="ebbefc55-bef9-4a03-a065-321bff3a75b4" containerName="nbdb" containerID="cri-o://7afd925f4b10ef56213c985a7b92c49d60a62f9be8ade831c42641100327ea40" gracePeriod=30 Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.230209 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-27gqg" podUID="ebbefc55-bef9-4a03-a065-321bff3a75b4" containerName="northd" containerID="cri-o://7811ddaf0b31f53a6bc23e82dfa3a4bb5f9fa408b1f2e22dff3f71e9de8b00d2" gracePeriod=30 Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.230246 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-27gqg" podUID="ebbefc55-bef9-4a03-a065-321bff3a75b4" containerName="ovn-acl-logging" containerID="cri-o://c5f583b0b94050424796b550a344f0c6069c1608629fdd01ddfdb303add0d5f7" gracePeriod=30 Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.230251 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-27gqg" podUID="ebbefc55-bef9-4a03-a065-321bff3a75b4" containerName="kube-rbac-proxy-node" containerID="cri-o://c59f9083cd75c8bee85596c2a178cb931de40a37dd459433dd2c23d5b61d95f0" gracePeriod=30 Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.230358 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-27gqg" podUID="ebbefc55-bef9-4a03-a065-321bff3a75b4" containerName="sbdb" containerID="cri-o://59471b38e240e2b9a2ccfc1dee734c077c4f09fa10e0b55964d0fe73cd8af5a1" gracePeriod=30 Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.230750 4869 kuberuntime_container.go:808] "Killing 
container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-27gqg" podUID="ebbefc55-bef9-4a03-a065-321bff3a75b4" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://e332dd7b45911e52db6ad9d474be312012028a9def590871b4d2a7a735c80ff0" gracePeriod=30 Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.262443 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-27gqg" podUID="ebbefc55-bef9-4a03-a065-321bff3a75b4" containerName="ovnkube-controller" containerID="cri-o://b3afd39d18311bca00ad07f623d9d405a8cd60f6d29fc32c2bf6509b450160dd" gracePeriod=30 Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.578133 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-27gqg_ebbefc55-bef9-4a03-a065-321bff3a75b4/ovn-acl-logging/0.log" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.579514 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-27gqg_ebbefc55-bef9-4a03-a065-321bff3a75b4/ovn-controller/0.log" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.580413 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-27gqg" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.651793 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/ebbefc55-bef9-4a03-a065-321bff3a75b4-run-ovn\") pod \"ebbefc55-bef9-4a03-a065-321bff3a75b4\" (UID: \"ebbefc55-bef9-4a03-a065-321bff3a75b4\") " Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.651848 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/ebbefc55-bef9-4a03-a065-321bff3a75b4-host-run-ovn-kubernetes\") pod \"ebbefc55-bef9-4a03-a065-321bff3a75b4\" (UID: \"ebbefc55-bef9-4a03-a065-321bff3a75b4\") " Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.651871 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ebbefc55-bef9-4a03-a065-321bff3a75b4-run-openvswitch\") pod \"ebbefc55-bef9-4a03-a065-321bff3a75b4\" (UID: \"ebbefc55-bef9-4a03-a065-321bff3a75b4\") " Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.651898 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/ebbefc55-bef9-4a03-a065-321bff3a75b4-host-kubelet\") pod \"ebbefc55-bef9-4a03-a065-321bff3a75b4\" (UID: \"ebbefc55-bef9-4a03-a065-321bff3a75b4\") " Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.651895 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ebbefc55-bef9-4a03-a065-321bff3a75b4-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "ebbefc55-bef9-4a03-a065-321bff3a75b4" (UID: "ebbefc55-bef9-4a03-a065-321bff3a75b4"). InnerVolumeSpecName "run-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.651928 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/ebbefc55-bef9-4a03-a065-321bff3a75b4-ovnkube-config\") pod \"ebbefc55-bef9-4a03-a065-321bff3a75b4\" (UID: \"ebbefc55-bef9-4a03-a065-321bff3a75b4\") " Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.651956 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v9mfd\" (UniqueName: \"kubernetes.io/projected/ebbefc55-bef9-4a03-a065-321bff3a75b4-kube-api-access-v9mfd\") pod \"ebbefc55-bef9-4a03-a065-321bff3a75b4\" (UID: \"ebbefc55-bef9-4a03-a065-321bff3a75b4\") " Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.651977 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/ebbefc55-bef9-4a03-a065-321bff3a75b4-node-log\") pod \"ebbefc55-bef9-4a03-a065-321bff3a75b4\" (UID: \"ebbefc55-bef9-4a03-a065-321bff3a75b4\") " Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.652011 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/ebbefc55-bef9-4a03-a065-321bff3a75b4-systemd-units\") pod \"ebbefc55-bef9-4a03-a065-321bff3a75b4\" (UID: \"ebbefc55-bef9-4a03-a065-321bff3a75b4\") " Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.652032 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/ebbefc55-bef9-4a03-a065-321bff3a75b4-ovn-node-metrics-cert\") pod \"ebbefc55-bef9-4a03-a065-321bff3a75b4\" (UID: \"ebbefc55-bef9-4a03-a065-321bff3a75b4\") " Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.652068 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/ebbefc55-bef9-4a03-a065-321bff3a75b4-log-socket\") pod \"ebbefc55-bef9-4a03-a065-321bff3a75b4\" (UID: \"ebbefc55-bef9-4a03-a065-321bff3a75b4\") " Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.652134 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/ebbefc55-bef9-4a03-a065-321bff3a75b4-ovnkube-script-lib\") pod \"ebbefc55-bef9-4a03-a065-321bff3a75b4\" (UID: \"ebbefc55-bef9-4a03-a065-321bff3a75b4\") " Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.652156 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/ebbefc55-bef9-4a03-a065-321bff3a75b4-host-cni-netd\") pod \"ebbefc55-bef9-4a03-a065-321bff3a75b4\" (UID: \"ebbefc55-bef9-4a03-a065-321bff3a75b4\") " Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.652194 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ebbefc55-bef9-4a03-a065-321bff3a75b4-env-overrides\") pod \"ebbefc55-bef9-4a03-a065-321bff3a75b4\" (UID: \"ebbefc55-bef9-4a03-a065-321bff3a75b4\") " Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.652229 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/ebbefc55-bef9-4a03-a065-321bff3a75b4-host-slash\") pod \"ebbefc55-bef9-4a03-a065-321bff3a75b4\" (UID: 
\"ebbefc55-bef9-4a03-a065-321bff3a75b4\") " Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.652248 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/ebbefc55-bef9-4a03-a065-321bff3a75b4-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ebbefc55-bef9-4a03-a065-321bff3a75b4\" (UID: \"ebbefc55-bef9-4a03-a065-321bff3a75b4\") " Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.652298 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/ebbefc55-bef9-4a03-a065-321bff3a75b4-host-run-netns\") pod \"ebbefc55-bef9-4a03-a065-321bff3a75b4\" (UID: \"ebbefc55-bef9-4a03-a065-321bff3a75b4\") " Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.652319 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/ebbefc55-bef9-4a03-a065-321bff3a75b4-run-systemd\") pod \"ebbefc55-bef9-4a03-a065-321bff3a75b4\" (UID: \"ebbefc55-bef9-4a03-a065-321bff3a75b4\") " Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.652344 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/ebbefc55-bef9-4a03-a065-321bff3a75b4-host-cni-bin\") pod \"ebbefc55-bef9-4a03-a065-321bff3a75b4\" (UID: \"ebbefc55-bef9-4a03-a065-321bff3a75b4\") " Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.652366 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ebbefc55-bef9-4a03-a065-321bff3a75b4-var-lib-openvswitch\") pod \"ebbefc55-bef9-4a03-a065-321bff3a75b4\" (UID: \"ebbefc55-bef9-4a03-a065-321bff3a75b4\") " Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.652390 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ebbefc55-bef9-4a03-a065-321bff3a75b4-etc-openvswitch\") pod \"ebbefc55-bef9-4a03-a065-321bff3a75b4\" (UID: \"ebbefc55-bef9-4a03-a065-321bff3a75b4\") " Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.652696 4869 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/ebbefc55-bef9-4a03-a065-321bff3a75b4-run-ovn\") on node \"crc\" DevicePath \"\"" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.651973 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ebbefc55-bef9-4a03-a065-321bff3a75b4-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "ebbefc55-bef9-4a03-a065-321bff3a75b4" (UID: "ebbefc55-bef9-4a03-a065-321bff3a75b4"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.651985 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ebbefc55-bef9-4a03-a065-321bff3a75b4-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "ebbefc55-bef9-4a03-a065-321bff3a75b4" (UID: "ebbefc55-bef9-4a03-a065-321bff3a75b4"). InnerVolumeSpecName "host-run-ovn-kubernetes". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.652023 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ebbefc55-bef9-4a03-a065-321bff3a75b4-node-log" (OuterVolumeSpecName: "node-log") pod "ebbefc55-bef9-4a03-a065-321bff3a75b4" (UID: "ebbefc55-bef9-4a03-a065-321bff3a75b4"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.652011 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ebbefc55-bef9-4a03-a065-321bff3a75b4-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "ebbefc55-bef9-4a03-a065-321bff3a75b4" (UID: "ebbefc55-bef9-4a03-a065-321bff3a75b4"). InnerVolumeSpecName "run-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.652062 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ebbefc55-bef9-4a03-a065-321bff3a75b4-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "ebbefc55-bef9-4a03-a065-321bff3a75b4" (UID: "ebbefc55-bef9-4a03-a065-321bff3a75b4"). InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.652592 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ebbefc55-bef9-4a03-a065-321bff3a75b4-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "ebbefc55-bef9-4a03-a065-321bff3a75b4" (UID: "ebbefc55-bef9-4a03-a065-321bff3a75b4"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.652966 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ebbefc55-bef9-4a03-a065-321bff3a75b4-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "ebbefc55-bef9-4a03-a065-321bff3a75b4" (UID: "ebbefc55-bef9-4a03-a065-321bff3a75b4"). InnerVolumeSpecName "host-cni-netd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.653039 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ebbefc55-bef9-4a03-a065-321bff3a75b4-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "ebbefc55-bef9-4a03-a065-321bff3a75b4" (UID: "ebbefc55-bef9-4a03-a065-321bff3a75b4"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.653069 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ebbefc55-bef9-4a03-a065-321bff3a75b4-log-socket" (OuterVolumeSpecName: "log-socket") pod "ebbefc55-bef9-4a03-a065-321bff3a75b4" (UID: "ebbefc55-bef9-4a03-a065-321bff3a75b4"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.653136 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ebbefc55-bef9-4a03-a065-321bff3a75b4-host-slash" (OuterVolumeSpecName: "host-slash") pod "ebbefc55-bef9-4a03-a065-321bff3a75b4" (UID: "ebbefc55-bef9-4a03-a065-321bff3a75b4"). InnerVolumeSpecName "host-slash". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.653456 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ebbefc55-bef9-4a03-a065-321bff3a75b4-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "ebbefc55-bef9-4a03-a065-321bff3a75b4" (UID: "ebbefc55-bef9-4a03-a065-321bff3a75b4"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.653505 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ebbefc55-bef9-4a03-a065-321bff3a75b4-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "ebbefc55-bef9-4a03-a065-321bff3a75b4" (UID: "ebbefc55-bef9-4a03-a065-321bff3a75b4"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.657574 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ebbefc55-bef9-4a03-a065-321bff3a75b4-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "ebbefc55-bef9-4a03-a065-321bff3a75b4" (UID: "ebbefc55-bef9-4a03-a065-321bff3a75b4"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.657745 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ebbefc55-bef9-4a03-a065-321bff3a75b4-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "ebbefc55-bef9-4a03-a065-321bff3a75b4" (UID: "ebbefc55-bef9-4a03-a065-321bff3a75b4"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.658372 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ebbefc55-bef9-4a03-a065-321bff3a75b4-kube-api-access-v9mfd" (OuterVolumeSpecName: "kube-api-access-v9mfd") pod "ebbefc55-bef9-4a03-a065-321bff3a75b4" (UID: "ebbefc55-bef9-4a03-a065-321bff3a75b4"). InnerVolumeSpecName "kube-api-access-v9mfd". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.658589 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ebbefc55-bef9-4a03-a065-321bff3a75b4-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "ebbefc55-bef9-4a03-a065-321bff3a75b4" (UID: "ebbefc55-bef9-4a03-a065-321bff3a75b4"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.658826 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ebbefc55-bef9-4a03-a065-321bff3a75b4-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "ebbefc55-bef9-4a03-a065-321bff3a75b4" (UID: "ebbefc55-bef9-4a03-a065-321bff3a75b4"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.658870 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ebbefc55-bef9-4a03-a065-321bff3a75b4-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "ebbefc55-bef9-4a03-a065-321bff3a75b4" (UID: "ebbefc55-bef9-4a03-a065-321bff3a75b4"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.662378 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-hwptl"] Oct 01 15:13:10 crc kubenswrapper[4869]: E1001 15:13:10.662880 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ebbefc55-bef9-4a03-a065-321bff3a75b4" containerName="ovn-controller" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.662904 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="ebbefc55-bef9-4a03-a065-321bff3a75b4" containerName="ovn-controller" Oct 01 15:13:10 crc kubenswrapper[4869]: E1001 15:13:10.662925 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ebbefc55-bef9-4a03-a065-321bff3a75b4" containerName="sbdb" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.662936 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="ebbefc55-bef9-4a03-a065-321bff3a75b4" containerName="sbdb" Oct 01 15:13:10 crc kubenswrapper[4869]: E1001 15:13:10.662955 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ebbefc55-bef9-4a03-a065-321bff3a75b4" containerName="kube-rbac-proxy-ovn-metrics" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.662963 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="ebbefc55-bef9-4a03-a065-321bff3a75b4" containerName="kube-rbac-proxy-ovn-metrics" Oct 01 15:13:10 crc kubenswrapper[4869]: E1001 15:13:10.662975 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ebbefc55-bef9-4a03-a065-321bff3a75b4" containerName="kubecfg-setup" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.662983 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="ebbefc55-bef9-4a03-a065-321bff3a75b4" containerName="kubecfg-setup" Oct 01 15:13:10 crc kubenswrapper[4869]: E1001 15:13:10.662996 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ebbefc55-bef9-4a03-a065-321bff3a75b4" containerName="kube-rbac-proxy-node" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.663005 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="ebbefc55-bef9-4a03-a065-321bff3a75b4" containerName="kube-rbac-proxy-node" Oct 01 15:13:10 crc kubenswrapper[4869]: E1001 15:13:10.663025 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ebbefc55-bef9-4a03-a065-321bff3a75b4" containerName="northd" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.663035 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="ebbefc55-bef9-4a03-a065-321bff3a75b4" containerName="northd" Oct 01 15:13:10 crc kubenswrapper[4869]: E1001 15:13:10.663051 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ebbefc55-bef9-4a03-a065-321bff3a75b4" containerName="ovnkube-controller" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.663061 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="ebbefc55-bef9-4a03-a065-321bff3a75b4" containerName="ovnkube-controller" Oct 01 15:13:10 crc kubenswrapper[4869]: E1001 15:13:10.663082 4869 cpu_manager.go:410] "RemoveStaleState: removing 
container" podUID="ebbefc55-bef9-4a03-a065-321bff3a75b4" containerName="ovn-acl-logging" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.663095 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="ebbefc55-bef9-4a03-a065-321bff3a75b4" containerName="ovn-acl-logging" Oct 01 15:13:10 crc kubenswrapper[4869]: E1001 15:13:10.663105 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ebbefc55-bef9-4a03-a065-321bff3a75b4" containerName="nbdb" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.663114 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="ebbefc55-bef9-4a03-a065-321bff3a75b4" containerName="nbdb" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.663598 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="ebbefc55-bef9-4a03-a065-321bff3a75b4" containerName="ovn-acl-logging" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.663614 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="ebbefc55-bef9-4a03-a065-321bff3a75b4" containerName="ovnkube-controller" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.663629 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="ebbefc55-bef9-4a03-a065-321bff3a75b4" containerName="kube-rbac-proxy-ovn-metrics" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.663638 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="ebbefc55-bef9-4a03-a065-321bff3a75b4" containerName="sbdb" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.663652 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="ebbefc55-bef9-4a03-a065-321bff3a75b4" containerName="northd" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.663665 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="ebbefc55-bef9-4a03-a065-321bff3a75b4" containerName="ovn-controller" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.663672 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="ebbefc55-bef9-4a03-a065-321bff3a75b4" containerName="kube-rbac-proxy-node" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.663690 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="ebbefc55-bef9-4a03-a065-321bff3a75b4" containerName="nbdb" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.670221 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-hwptl" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.670548 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ebbefc55-bef9-4a03-a065-321bff3a75b4-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "ebbefc55-bef9-4a03-a065-321bff3a75b4" (UID: "ebbefc55-bef9-4a03-a065-321bff3a75b4"). InnerVolumeSpecName "run-systemd". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.754185 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/e59dd056-ee8a-4b8b-af8a-f2d17bb64328-ovnkube-config\") pod \"ovnkube-node-hwptl\" (UID: \"e59dd056-ee8a-4b8b-af8a-f2d17bb64328\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwptl" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.754229 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/e59dd056-ee8a-4b8b-af8a-f2d17bb64328-run-systemd\") pod \"ovnkube-node-hwptl\" (UID: \"e59dd056-ee8a-4b8b-af8a-f2d17bb64328\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwptl" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.754268 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e59dd056-ee8a-4b8b-af8a-f2d17bb64328-host-run-ovn-kubernetes\") pod \"ovnkube-node-hwptl\" (UID: \"e59dd056-ee8a-4b8b-af8a-f2d17bb64328\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwptl" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.754295 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/e59dd056-ee8a-4b8b-af8a-f2d17bb64328-node-log\") pod \"ovnkube-node-hwptl\" (UID: \"e59dd056-ee8a-4b8b-af8a-f2d17bb64328\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwptl" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.754317 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/e59dd056-ee8a-4b8b-af8a-f2d17bb64328-env-overrides\") pod \"ovnkube-node-hwptl\" (UID: \"e59dd056-ee8a-4b8b-af8a-f2d17bb64328\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwptl" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.754337 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/e59dd056-ee8a-4b8b-af8a-f2d17bb64328-ovn-node-metrics-cert\") pod \"ovnkube-node-hwptl\" (UID: \"e59dd056-ee8a-4b8b-af8a-f2d17bb64328\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwptl" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.754353 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/e59dd056-ee8a-4b8b-af8a-f2d17bb64328-ovnkube-script-lib\") pod \"ovnkube-node-hwptl\" (UID: \"e59dd056-ee8a-4b8b-af8a-f2d17bb64328\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwptl" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.754434 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e59dd056-ee8a-4b8b-af8a-f2d17bb64328-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-hwptl\" (UID: \"e59dd056-ee8a-4b8b-af8a-f2d17bb64328\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwptl" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.754508 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: 
\"kubernetes.io/host-path/e59dd056-ee8a-4b8b-af8a-f2d17bb64328-var-lib-openvswitch\") pod \"ovnkube-node-hwptl\" (UID: \"e59dd056-ee8a-4b8b-af8a-f2d17bb64328\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwptl" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.754545 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e59dd056-ee8a-4b8b-af8a-f2d17bb64328-run-openvswitch\") pod \"ovnkube-node-hwptl\" (UID: \"e59dd056-ee8a-4b8b-af8a-f2d17bb64328\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwptl" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.754564 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/e59dd056-ee8a-4b8b-af8a-f2d17bb64328-host-slash\") pod \"ovnkube-node-hwptl\" (UID: \"e59dd056-ee8a-4b8b-af8a-f2d17bb64328\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwptl" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.754578 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e59dd056-ee8a-4b8b-af8a-f2d17bb64328-etc-openvswitch\") pod \"ovnkube-node-hwptl\" (UID: \"e59dd056-ee8a-4b8b-af8a-f2d17bb64328\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwptl" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.754604 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/e59dd056-ee8a-4b8b-af8a-f2d17bb64328-host-kubelet\") pod \"ovnkube-node-hwptl\" (UID: \"e59dd056-ee8a-4b8b-af8a-f2d17bb64328\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwptl" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.754619 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/e59dd056-ee8a-4b8b-af8a-f2d17bb64328-host-cni-netd\") pod \"ovnkube-node-hwptl\" (UID: \"e59dd056-ee8a-4b8b-af8a-f2d17bb64328\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwptl" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.754638 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hrkr8\" (UniqueName: \"kubernetes.io/projected/e59dd056-ee8a-4b8b-af8a-f2d17bb64328-kube-api-access-hrkr8\") pod \"ovnkube-node-hwptl\" (UID: \"e59dd056-ee8a-4b8b-af8a-f2d17bb64328\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwptl" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.754795 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/e59dd056-ee8a-4b8b-af8a-f2d17bb64328-run-ovn\") pod \"ovnkube-node-hwptl\" (UID: \"e59dd056-ee8a-4b8b-af8a-f2d17bb64328\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwptl" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.754918 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/e59dd056-ee8a-4b8b-af8a-f2d17bb64328-systemd-units\") pod \"ovnkube-node-hwptl\" (UID: \"e59dd056-ee8a-4b8b-af8a-f2d17bb64328\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwptl" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.754950 4869 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/e59dd056-ee8a-4b8b-af8a-f2d17bb64328-host-cni-bin\") pod \"ovnkube-node-hwptl\" (UID: \"e59dd056-ee8a-4b8b-af8a-f2d17bb64328\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwptl" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.755006 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/e59dd056-ee8a-4b8b-af8a-f2d17bb64328-log-socket\") pod \"ovnkube-node-hwptl\" (UID: \"e59dd056-ee8a-4b8b-af8a-f2d17bb64328\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwptl" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.755039 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/e59dd056-ee8a-4b8b-af8a-f2d17bb64328-host-run-netns\") pod \"ovnkube-node-hwptl\" (UID: \"e59dd056-ee8a-4b8b-af8a-f2d17bb64328\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwptl" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.755160 4869 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/ebbefc55-bef9-4a03-a065-321bff3a75b4-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.755182 4869 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ebbefc55-bef9-4a03-a065-321bff3a75b4-run-openvswitch\") on node \"crc\" DevicePath \"\"" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.755202 4869 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/ebbefc55-bef9-4a03-a065-321bff3a75b4-host-kubelet\") on node \"crc\" DevicePath \"\"" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.755221 4869 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/ebbefc55-bef9-4a03-a065-321bff3a75b4-node-log\") on node \"crc\" DevicePath \"\"" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.755241 4869 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/ebbefc55-bef9-4a03-a065-321bff3a75b4-ovnkube-config\") on node \"crc\" DevicePath \"\"" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.755286 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v9mfd\" (UniqueName: \"kubernetes.io/projected/ebbefc55-bef9-4a03-a065-321bff3a75b4-kube-api-access-v9mfd\") on node \"crc\" DevicePath \"\"" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.755305 4869 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/ebbefc55-bef9-4a03-a065-321bff3a75b4-systemd-units\") on node \"crc\" DevicePath \"\"" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.755322 4869 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/ebbefc55-bef9-4a03-a065-321bff3a75b4-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.755340 4869 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/ebbefc55-bef9-4a03-a065-321bff3a75b4-log-socket\") on node \"crc\" DevicePath \"\"" Oct 01 15:13:10 crc 
kubenswrapper[4869]: I1001 15:13:10.755357 4869 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/ebbefc55-bef9-4a03-a065-321bff3a75b4-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.755376 4869 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/ebbefc55-bef9-4a03-a065-321bff3a75b4-host-cni-netd\") on node \"crc\" DevicePath \"\"" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.755393 4869 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ebbefc55-bef9-4a03-a065-321bff3a75b4-env-overrides\") on node \"crc\" DevicePath \"\"" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.755413 4869 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/ebbefc55-bef9-4a03-a065-321bff3a75b4-host-slash\") on node \"crc\" DevicePath \"\"" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.755433 4869 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/ebbefc55-bef9-4a03-a065-321bff3a75b4-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.755452 4869 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/ebbefc55-bef9-4a03-a065-321bff3a75b4-host-run-netns\") on node \"crc\" DevicePath \"\"" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.755468 4869 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/ebbefc55-bef9-4a03-a065-321bff3a75b4-run-systemd\") on node \"crc\" DevicePath \"\"" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.755484 4869 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/ebbefc55-bef9-4a03-a065-321bff3a75b4-host-cni-bin\") on node \"crc\" DevicePath \"\"" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.755502 4869 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ebbefc55-bef9-4a03-a065-321bff3a75b4-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.755520 4869 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ebbefc55-bef9-4a03-a065-321bff3a75b4-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.857316 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/e59dd056-ee8a-4b8b-af8a-f2d17bb64328-systemd-units\") pod \"ovnkube-node-hwptl\" (UID: \"e59dd056-ee8a-4b8b-af8a-f2d17bb64328\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwptl" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.857478 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/e59dd056-ee8a-4b8b-af8a-f2d17bb64328-systemd-units\") pod \"ovnkube-node-hwptl\" (UID: \"e59dd056-ee8a-4b8b-af8a-f2d17bb64328\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwptl" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.857945 4869 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/e59dd056-ee8a-4b8b-af8a-f2d17bb64328-host-cni-bin\") pod \"ovnkube-node-hwptl\" (UID: \"e59dd056-ee8a-4b8b-af8a-f2d17bb64328\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwptl" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.857683 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/e59dd056-ee8a-4b8b-af8a-f2d17bb64328-host-cni-bin\") pod \"ovnkube-node-hwptl\" (UID: \"e59dd056-ee8a-4b8b-af8a-f2d17bb64328\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwptl" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.858068 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/e59dd056-ee8a-4b8b-af8a-f2d17bb64328-log-socket\") pod \"ovnkube-node-hwptl\" (UID: \"e59dd056-ee8a-4b8b-af8a-f2d17bb64328\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwptl" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.858099 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/e59dd056-ee8a-4b8b-af8a-f2d17bb64328-host-run-netns\") pod \"ovnkube-node-hwptl\" (UID: \"e59dd056-ee8a-4b8b-af8a-f2d17bb64328\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwptl" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.858138 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/e59dd056-ee8a-4b8b-af8a-f2d17bb64328-ovnkube-config\") pod \"ovnkube-node-hwptl\" (UID: \"e59dd056-ee8a-4b8b-af8a-f2d17bb64328\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwptl" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.858172 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/e59dd056-ee8a-4b8b-af8a-f2d17bb64328-run-systemd\") pod \"ovnkube-node-hwptl\" (UID: \"e59dd056-ee8a-4b8b-af8a-f2d17bb64328\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwptl" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.858216 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/e59dd056-ee8a-4b8b-af8a-f2d17bb64328-node-log\") pod \"ovnkube-node-hwptl\" (UID: \"e59dd056-ee8a-4b8b-af8a-f2d17bb64328\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwptl" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.858245 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e59dd056-ee8a-4b8b-af8a-f2d17bb64328-host-run-ovn-kubernetes\") pod \"ovnkube-node-hwptl\" (UID: \"e59dd056-ee8a-4b8b-af8a-f2d17bb64328\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwptl" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.858317 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/e59dd056-ee8a-4b8b-af8a-f2d17bb64328-env-overrides\") pod \"ovnkube-node-hwptl\" (UID: \"e59dd056-ee8a-4b8b-af8a-f2d17bb64328\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwptl" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.858358 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: 
\"kubernetes.io/secret/e59dd056-ee8a-4b8b-af8a-f2d17bb64328-ovn-node-metrics-cert\") pod \"ovnkube-node-hwptl\" (UID: \"e59dd056-ee8a-4b8b-af8a-f2d17bb64328\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwptl" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.858389 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/e59dd056-ee8a-4b8b-af8a-f2d17bb64328-ovnkube-script-lib\") pod \"ovnkube-node-hwptl\" (UID: \"e59dd056-ee8a-4b8b-af8a-f2d17bb64328\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwptl" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.858432 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e59dd056-ee8a-4b8b-af8a-f2d17bb64328-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-hwptl\" (UID: \"e59dd056-ee8a-4b8b-af8a-f2d17bb64328\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwptl" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.858469 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e59dd056-ee8a-4b8b-af8a-f2d17bb64328-var-lib-openvswitch\") pod \"ovnkube-node-hwptl\" (UID: \"e59dd056-ee8a-4b8b-af8a-f2d17bb64328\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwptl" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.858499 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e59dd056-ee8a-4b8b-af8a-f2d17bb64328-run-openvswitch\") pod \"ovnkube-node-hwptl\" (UID: \"e59dd056-ee8a-4b8b-af8a-f2d17bb64328\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwptl" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.858534 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/e59dd056-ee8a-4b8b-af8a-f2d17bb64328-host-slash\") pod \"ovnkube-node-hwptl\" (UID: \"e59dd056-ee8a-4b8b-af8a-f2d17bb64328\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwptl" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.858599 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e59dd056-ee8a-4b8b-af8a-f2d17bb64328-etc-openvswitch\") pod \"ovnkube-node-hwptl\" (UID: \"e59dd056-ee8a-4b8b-af8a-f2d17bb64328\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwptl" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.858649 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/e59dd056-ee8a-4b8b-af8a-f2d17bb64328-host-kubelet\") pod \"ovnkube-node-hwptl\" (UID: \"e59dd056-ee8a-4b8b-af8a-f2d17bb64328\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwptl" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.858704 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/e59dd056-ee8a-4b8b-af8a-f2d17bb64328-host-cni-netd\") pod \"ovnkube-node-hwptl\" (UID: \"e59dd056-ee8a-4b8b-af8a-f2d17bb64328\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwptl" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.858734 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hrkr8\" (UniqueName: 
\"kubernetes.io/projected/e59dd056-ee8a-4b8b-af8a-f2d17bb64328-kube-api-access-hrkr8\") pod \"ovnkube-node-hwptl\" (UID: \"e59dd056-ee8a-4b8b-af8a-f2d17bb64328\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwptl" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.858769 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/e59dd056-ee8a-4b8b-af8a-f2d17bb64328-run-ovn\") pod \"ovnkube-node-hwptl\" (UID: \"e59dd056-ee8a-4b8b-af8a-f2d17bb64328\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwptl" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.858873 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/e59dd056-ee8a-4b8b-af8a-f2d17bb64328-run-ovn\") pod \"ovnkube-node-hwptl\" (UID: \"e59dd056-ee8a-4b8b-af8a-f2d17bb64328\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwptl" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.858915 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/e59dd056-ee8a-4b8b-af8a-f2d17bb64328-log-socket\") pod \"ovnkube-node-hwptl\" (UID: \"e59dd056-ee8a-4b8b-af8a-f2d17bb64328\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwptl" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.858952 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/e59dd056-ee8a-4b8b-af8a-f2d17bb64328-host-run-netns\") pod \"ovnkube-node-hwptl\" (UID: \"e59dd056-ee8a-4b8b-af8a-f2d17bb64328\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwptl" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.859430 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e59dd056-ee8a-4b8b-af8a-f2d17bb64328-host-run-ovn-kubernetes\") pod \"ovnkube-node-hwptl\" (UID: \"e59dd056-ee8a-4b8b-af8a-f2d17bb64328\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwptl" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.859482 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/e59dd056-ee8a-4b8b-af8a-f2d17bb64328-run-systemd\") pod \"ovnkube-node-hwptl\" (UID: \"e59dd056-ee8a-4b8b-af8a-f2d17bb64328\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwptl" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.859516 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/e59dd056-ee8a-4b8b-af8a-f2d17bb64328-node-log\") pod \"ovnkube-node-hwptl\" (UID: \"e59dd056-ee8a-4b8b-af8a-f2d17bb64328\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwptl" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.859443 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e59dd056-ee8a-4b8b-af8a-f2d17bb64328-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-hwptl\" (UID: \"e59dd056-ee8a-4b8b-af8a-f2d17bb64328\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwptl" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.860035 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/e59dd056-ee8a-4b8b-af8a-f2d17bb64328-env-overrides\") pod \"ovnkube-node-hwptl\" (UID: 
\"e59dd056-ee8a-4b8b-af8a-f2d17bb64328\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwptl" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.860081 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e59dd056-ee8a-4b8b-af8a-f2d17bb64328-etc-openvswitch\") pod \"ovnkube-node-hwptl\" (UID: \"e59dd056-ee8a-4b8b-af8a-f2d17bb64328\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwptl" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.860108 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e59dd056-ee8a-4b8b-af8a-f2d17bb64328-var-lib-openvswitch\") pod \"ovnkube-node-hwptl\" (UID: \"e59dd056-ee8a-4b8b-af8a-f2d17bb64328\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwptl" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.860116 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/e59dd056-ee8a-4b8b-af8a-f2d17bb64328-ovnkube-config\") pod \"ovnkube-node-hwptl\" (UID: \"e59dd056-ee8a-4b8b-af8a-f2d17bb64328\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwptl" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.860133 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e59dd056-ee8a-4b8b-af8a-f2d17bb64328-run-openvswitch\") pod \"ovnkube-node-hwptl\" (UID: \"e59dd056-ee8a-4b8b-af8a-f2d17bb64328\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwptl" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.860156 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/e59dd056-ee8a-4b8b-af8a-f2d17bb64328-host-slash\") pod \"ovnkube-node-hwptl\" (UID: \"e59dd056-ee8a-4b8b-af8a-f2d17bb64328\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwptl" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.860181 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/e59dd056-ee8a-4b8b-af8a-f2d17bb64328-host-cni-netd\") pod \"ovnkube-node-hwptl\" (UID: \"e59dd056-ee8a-4b8b-af8a-f2d17bb64328\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwptl" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.860230 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/e59dd056-ee8a-4b8b-af8a-f2d17bb64328-host-kubelet\") pod \"ovnkube-node-hwptl\" (UID: \"e59dd056-ee8a-4b8b-af8a-f2d17bb64328\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwptl" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.861014 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/e59dd056-ee8a-4b8b-af8a-f2d17bb64328-ovnkube-script-lib\") pod \"ovnkube-node-hwptl\" (UID: \"e59dd056-ee8a-4b8b-af8a-f2d17bb64328\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwptl" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.866643 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/e59dd056-ee8a-4b8b-af8a-f2d17bb64328-ovn-node-metrics-cert\") pod \"ovnkube-node-hwptl\" (UID: \"e59dd056-ee8a-4b8b-af8a-f2d17bb64328\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwptl" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 
15:13:10.878213 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hrkr8\" (UniqueName: \"kubernetes.io/projected/e59dd056-ee8a-4b8b-af8a-f2d17bb64328-kube-api-access-hrkr8\") pod \"ovnkube-node-hwptl\" (UID: \"e59dd056-ee8a-4b8b-af8a-f2d17bb64328\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwptl" Oct 01 15:13:10 crc kubenswrapper[4869]: I1001 15:13:10.986526 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-hwptl" Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.243891 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-27gqg_ebbefc55-bef9-4a03-a065-321bff3a75b4/ovn-acl-logging/0.log" Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.245167 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-27gqg_ebbefc55-bef9-4a03-a065-321bff3a75b4/ovn-controller/0.log" Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.245890 4869 generic.go:334] "Generic (PLEG): container finished" podID="ebbefc55-bef9-4a03-a065-321bff3a75b4" containerID="b3afd39d18311bca00ad07f623d9d405a8cd60f6d29fc32c2bf6509b450160dd" exitCode=0 Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.245925 4869 generic.go:334] "Generic (PLEG): container finished" podID="ebbefc55-bef9-4a03-a065-321bff3a75b4" containerID="59471b38e240e2b9a2ccfc1dee734c077c4f09fa10e0b55964d0fe73cd8af5a1" exitCode=0 Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.245933 4869 generic.go:334] "Generic (PLEG): container finished" podID="ebbefc55-bef9-4a03-a065-321bff3a75b4" containerID="7afd925f4b10ef56213c985a7b92c49d60a62f9be8ade831c42641100327ea40" exitCode=0 Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.245940 4869 generic.go:334] "Generic (PLEG): container finished" podID="ebbefc55-bef9-4a03-a065-321bff3a75b4" containerID="7811ddaf0b31f53a6bc23e82dfa3a4bb5f9fa408b1f2e22dff3f71e9de8b00d2" exitCode=0 Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.245924 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-27gqg" event={"ID":"ebbefc55-bef9-4a03-a065-321bff3a75b4","Type":"ContainerDied","Data":"b3afd39d18311bca00ad07f623d9d405a8cd60f6d29fc32c2bf6509b450160dd"} Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.245960 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-27gqg" Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.246009 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-27gqg" event={"ID":"ebbefc55-bef9-4a03-a065-321bff3a75b4","Type":"ContainerDied","Data":"59471b38e240e2b9a2ccfc1dee734c077c4f09fa10e0b55964d0fe73cd8af5a1"} Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.245947 4869 generic.go:334] "Generic (PLEG): container finished" podID="ebbefc55-bef9-4a03-a065-321bff3a75b4" containerID="e332dd7b45911e52db6ad9d474be312012028a9def590871b4d2a7a735c80ff0" exitCode=0 Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.246050 4869 generic.go:334] "Generic (PLEG): container finished" podID="ebbefc55-bef9-4a03-a065-321bff3a75b4" containerID="c59f9083cd75c8bee85596c2a178cb931de40a37dd459433dd2c23d5b61d95f0" exitCode=0 Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.246051 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-27gqg" event={"ID":"ebbefc55-bef9-4a03-a065-321bff3a75b4","Type":"ContainerDied","Data":"7afd925f4b10ef56213c985a7b92c49d60a62f9be8ade831c42641100327ea40"} Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.246070 4869 scope.go:117] "RemoveContainer" containerID="b3afd39d18311bca00ad07f623d9d405a8cd60f6d29fc32c2bf6509b450160dd" Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.246085 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-27gqg" event={"ID":"ebbefc55-bef9-4a03-a065-321bff3a75b4","Type":"ContainerDied","Data":"7811ddaf0b31f53a6bc23e82dfa3a4bb5f9fa408b1f2e22dff3f71e9de8b00d2"} Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.246116 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-27gqg" event={"ID":"ebbefc55-bef9-4a03-a065-321bff3a75b4","Type":"ContainerDied","Data":"e332dd7b45911e52db6ad9d474be312012028a9def590871b4d2a7a735c80ff0"} Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.246147 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-27gqg" event={"ID":"ebbefc55-bef9-4a03-a065-321bff3a75b4","Type":"ContainerDied","Data":"c59f9083cd75c8bee85596c2a178cb931de40a37dd459433dd2c23d5b61d95f0"} Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.246183 4869 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c5f583b0b94050424796b550a344f0c6069c1608629fdd01ddfdb303add0d5f7"} Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.246214 4869 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ce48fdbad0049809bac4c2afd94d87133b935841c6a3805f8eacc26dd8527c62"} Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.246233 4869 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"992afa5f68050b8a1ec99aaf070b3016494713d2d0d51fef7bd1296ad29546fd"} Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.246057 4869 generic.go:334] "Generic (PLEG): container finished" podID="ebbefc55-bef9-4a03-a065-321bff3a75b4" containerID="c5f583b0b94050424796b550a344f0c6069c1608629fdd01ddfdb303add0d5f7" exitCode=143 Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.246290 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-27gqg" 
event={"ID":"ebbefc55-bef9-4a03-a065-321bff3a75b4","Type":"ContainerDied","Data":"c5f583b0b94050424796b550a344f0c6069c1608629fdd01ddfdb303add0d5f7"} Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.246296 4869 generic.go:334] "Generic (PLEG): container finished" podID="ebbefc55-bef9-4a03-a065-321bff3a75b4" containerID="ce48fdbad0049809bac4c2afd94d87133b935841c6a3805f8eacc26dd8527c62" exitCode=143 Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.246320 4869 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b3afd39d18311bca00ad07f623d9d405a8cd60f6d29fc32c2bf6509b450160dd"} Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.246341 4869 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"59471b38e240e2b9a2ccfc1dee734c077c4f09fa10e0b55964d0fe73cd8af5a1"} Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.246358 4869 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7afd925f4b10ef56213c985a7b92c49d60a62f9be8ade831c42641100327ea40"} Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.246376 4869 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7811ddaf0b31f53a6bc23e82dfa3a4bb5f9fa408b1f2e22dff3f71e9de8b00d2"} Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.246392 4869 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e332dd7b45911e52db6ad9d474be312012028a9def590871b4d2a7a735c80ff0"} Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.246407 4869 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c59f9083cd75c8bee85596c2a178cb931de40a37dd459433dd2c23d5b61d95f0"} Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.246423 4869 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c5f583b0b94050424796b550a344f0c6069c1608629fdd01ddfdb303add0d5f7"} Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.246440 4869 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ce48fdbad0049809bac4c2afd94d87133b935841c6a3805f8eacc26dd8527c62"} Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.246456 4869 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"992afa5f68050b8a1ec99aaf070b3016494713d2d0d51fef7bd1296ad29546fd"} Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.246478 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-27gqg" event={"ID":"ebbefc55-bef9-4a03-a065-321bff3a75b4","Type":"ContainerDied","Data":"ce48fdbad0049809bac4c2afd94d87133b935841c6a3805f8eacc26dd8527c62"} Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.246506 4869 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b3afd39d18311bca00ad07f623d9d405a8cd60f6d29fc32c2bf6509b450160dd"} Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.246525 4869 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"59471b38e240e2b9a2ccfc1dee734c077c4f09fa10e0b55964d0fe73cd8af5a1"} Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.246544 4869 
pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7afd925f4b10ef56213c985a7b92c49d60a62f9be8ade831c42641100327ea40"} Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.246560 4869 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7811ddaf0b31f53a6bc23e82dfa3a4bb5f9fa408b1f2e22dff3f71e9de8b00d2"} Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.246575 4869 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e332dd7b45911e52db6ad9d474be312012028a9def590871b4d2a7a735c80ff0"} Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.246590 4869 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c59f9083cd75c8bee85596c2a178cb931de40a37dd459433dd2c23d5b61d95f0"} Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.246605 4869 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c5f583b0b94050424796b550a344f0c6069c1608629fdd01ddfdb303add0d5f7"} Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.246619 4869 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ce48fdbad0049809bac4c2afd94d87133b935841c6a3805f8eacc26dd8527c62"} Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.246633 4869 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"992afa5f68050b8a1ec99aaf070b3016494713d2d0d51fef7bd1296ad29546fd"} Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.246656 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-27gqg" event={"ID":"ebbefc55-bef9-4a03-a065-321bff3a75b4","Type":"ContainerDied","Data":"494cc5f315d900d7ca2031138d77f2eac5461180d1368ed046cc0e0509e13a78"} Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.246680 4869 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b3afd39d18311bca00ad07f623d9d405a8cd60f6d29fc32c2bf6509b450160dd"} Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.246699 4869 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"59471b38e240e2b9a2ccfc1dee734c077c4f09fa10e0b55964d0fe73cd8af5a1"} Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.246714 4869 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7afd925f4b10ef56213c985a7b92c49d60a62f9be8ade831c42641100327ea40"} Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.246729 4869 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7811ddaf0b31f53a6bc23e82dfa3a4bb5f9fa408b1f2e22dff3f71e9de8b00d2"} Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.246744 4869 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e332dd7b45911e52db6ad9d474be312012028a9def590871b4d2a7a735c80ff0"} Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.246760 4869 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c59f9083cd75c8bee85596c2a178cb931de40a37dd459433dd2c23d5b61d95f0"} Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.246776 4869 
pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c5f583b0b94050424796b550a344f0c6069c1608629fdd01ddfdb303add0d5f7"} Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.246792 4869 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ce48fdbad0049809bac4c2afd94d87133b935841c6a3805f8eacc26dd8527c62"} Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.246807 4869 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"992afa5f68050b8a1ec99aaf070b3016494713d2d0d51fef7bd1296ad29546fd"} Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.250033 4869 generic.go:334] "Generic (PLEG): container finished" podID="e59dd056-ee8a-4b8b-af8a-f2d17bb64328" containerID="8785731dc316904d74b92ac91e3e06e6a8ac4ac82b6d27ebb3921f1222935c0a" exitCode=0 Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.250144 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hwptl" event={"ID":"e59dd056-ee8a-4b8b-af8a-f2d17bb64328","Type":"ContainerDied","Data":"8785731dc316904d74b92ac91e3e06e6a8ac4ac82b6d27ebb3921f1222935c0a"} Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.250188 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hwptl" event={"ID":"e59dd056-ee8a-4b8b-af8a-f2d17bb64328","Type":"ContainerStarted","Data":"ca2fcb197ec9ee52977462d23eebd7f2b6dc36ab55b5665aa1fdecfa91e68c5c"} Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.264323 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-j98s2_69635c7a-0025-4ea2-a1b6-fc7776c2be11/kube-multus/0.log" Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.264395 4869 generic.go:334] "Generic (PLEG): container finished" podID="69635c7a-0025-4ea2-a1b6-fc7776c2be11" containerID="a96142295c608f4cce284b8e003ed2a5c2954ce0542b28d04f3f63710c6c65e0" exitCode=2 Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.264464 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-j98s2" event={"ID":"69635c7a-0025-4ea2-a1b6-fc7776c2be11","Type":"ContainerDied","Data":"a96142295c608f4cce284b8e003ed2a5c2954ce0542b28d04f3f63710c6c65e0"} Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.265072 4869 scope.go:117] "RemoveContainer" containerID="a96142295c608f4cce284b8e003ed2a5c2954ce0542b28d04f3f63710c6c65e0" Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.287600 4869 scope.go:117] "RemoveContainer" containerID="59471b38e240e2b9a2ccfc1dee734c077c4f09fa10e0b55964d0fe73cd8af5a1" Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.347622 4869 scope.go:117] "RemoveContainer" containerID="7afd925f4b10ef56213c985a7b92c49d60a62f9be8ade831c42641100327ea40" Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.366583 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-27gqg"] Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.372810 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-27gqg"] Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.420540 4869 scope.go:117] "RemoveContainer" containerID="7811ddaf0b31f53a6bc23e82dfa3a4bb5f9fa408b1f2e22dff3f71e9de8b00d2" Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.435365 4869 scope.go:117] "RemoveContainer" 
containerID="e332dd7b45911e52db6ad9d474be312012028a9def590871b4d2a7a735c80ff0" Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.457617 4869 scope.go:117] "RemoveContainer" containerID="c59f9083cd75c8bee85596c2a178cb931de40a37dd459433dd2c23d5b61d95f0" Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.479416 4869 scope.go:117] "RemoveContainer" containerID="c5f583b0b94050424796b550a344f0c6069c1608629fdd01ddfdb303add0d5f7" Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.501935 4869 scope.go:117] "RemoveContainer" containerID="ce48fdbad0049809bac4c2afd94d87133b935841c6a3805f8eacc26dd8527c62" Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.531791 4869 scope.go:117] "RemoveContainer" containerID="992afa5f68050b8a1ec99aaf070b3016494713d2d0d51fef7bd1296ad29546fd" Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.546160 4869 scope.go:117] "RemoveContainer" containerID="b3afd39d18311bca00ad07f623d9d405a8cd60f6d29fc32c2bf6509b450160dd" Oct 01 15:13:11 crc kubenswrapper[4869]: E1001 15:13:11.546538 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b3afd39d18311bca00ad07f623d9d405a8cd60f6d29fc32c2bf6509b450160dd\": container with ID starting with b3afd39d18311bca00ad07f623d9d405a8cd60f6d29fc32c2bf6509b450160dd not found: ID does not exist" containerID="b3afd39d18311bca00ad07f623d9d405a8cd60f6d29fc32c2bf6509b450160dd" Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.546576 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b3afd39d18311bca00ad07f623d9d405a8cd60f6d29fc32c2bf6509b450160dd"} err="failed to get container status \"b3afd39d18311bca00ad07f623d9d405a8cd60f6d29fc32c2bf6509b450160dd\": rpc error: code = NotFound desc = could not find container \"b3afd39d18311bca00ad07f623d9d405a8cd60f6d29fc32c2bf6509b450160dd\": container with ID starting with b3afd39d18311bca00ad07f623d9d405a8cd60f6d29fc32c2bf6509b450160dd not found: ID does not exist" Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.546602 4869 scope.go:117] "RemoveContainer" containerID="59471b38e240e2b9a2ccfc1dee734c077c4f09fa10e0b55964d0fe73cd8af5a1" Oct 01 15:13:11 crc kubenswrapper[4869]: E1001 15:13:11.547141 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"59471b38e240e2b9a2ccfc1dee734c077c4f09fa10e0b55964d0fe73cd8af5a1\": container with ID starting with 59471b38e240e2b9a2ccfc1dee734c077c4f09fa10e0b55964d0fe73cd8af5a1 not found: ID does not exist" containerID="59471b38e240e2b9a2ccfc1dee734c077c4f09fa10e0b55964d0fe73cd8af5a1" Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.547167 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"59471b38e240e2b9a2ccfc1dee734c077c4f09fa10e0b55964d0fe73cd8af5a1"} err="failed to get container status \"59471b38e240e2b9a2ccfc1dee734c077c4f09fa10e0b55964d0fe73cd8af5a1\": rpc error: code = NotFound desc = could not find container \"59471b38e240e2b9a2ccfc1dee734c077c4f09fa10e0b55964d0fe73cd8af5a1\": container with ID starting with 59471b38e240e2b9a2ccfc1dee734c077c4f09fa10e0b55964d0fe73cd8af5a1 not found: ID does not exist" Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.547184 4869 scope.go:117] "RemoveContainer" containerID="7afd925f4b10ef56213c985a7b92c49d60a62f9be8ade831c42641100327ea40" Oct 01 15:13:11 crc kubenswrapper[4869]: E1001 15:13:11.547566 4869 log.go:32] "ContainerStatus from 
runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7afd925f4b10ef56213c985a7b92c49d60a62f9be8ade831c42641100327ea40\": container with ID starting with 7afd925f4b10ef56213c985a7b92c49d60a62f9be8ade831c42641100327ea40 not found: ID does not exist" containerID="7afd925f4b10ef56213c985a7b92c49d60a62f9be8ade831c42641100327ea40" Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.547613 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7afd925f4b10ef56213c985a7b92c49d60a62f9be8ade831c42641100327ea40"} err="failed to get container status \"7afd925f4b10ef56213c985a7b92c49d60a62f9be8ade831c42641100327ea40\": rpc error: code = NotFound desc = could not find container \"7afd925f4b10ef56213c985a7b92c49d60a62f9be8ade831c42641100327ea40\": container with ID starting with 7afd925f4b10ef56213c985a7b92c49d60a62f9be8ade831c42641100327ea40 not found: ID does not exist" Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.547651 4869 scope.go:117] "RemoveContainer" containerID="7811ddaf0b31f53a6bc23e82dfa3a4bb5f9fa408b1f2e22dff3f71e9de8b00d2" Oct 01 15:13:11 crc kubenswrapper[4869]: E1001 15:13:11.548050 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7811ddaf0b31f53a6bc23e82dfa3a4bb5f9fa408b1f2e22dff3f71e9de8b00d2\": container with ID starting with 7811ddaf0b31f53a6bc23e82dfa3a4bb5f9fa408b1f2e22dff3f71e9de8b00d2 not found: ID does not exist" containerID="7811ddaf0b31f53a6bc23e82dfa3a4bb5f9fa408b1f2e22dff3f71e9de8b00d2" Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.548082 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7811ddaf0b31f53a6bc23e82dfa3a4bb5f9fa408b1f2e22dff3f71e9de8b00d2"} err="failed to get container status \"7811ddaf0b31f53a6bc23e82dfa3a4bb5f9fa408b1f2e22dff3f71e9de8b00d2\": rpc error: code = NotFound desc = could not find container \"7811ddaf0b31f53a6bc23e82dfa3a4bb5f9fa408b1f2e22dff3f71e9de8b00d2\": container with ID starting with 7811ddaf0b31f53a6bc23e82dfa3a4bb5f9fa408b1f2e22dff3f71e9de8b00d2 not found: ID does not exist" Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.548109 4869 scope.go:117] "RemoveContainer" containerID="e332dd7b45911e52db6ad9d474be312012028a9def590871b4d2a7a735c80ff0" Oct 01 15:13:11 crc kubenswrapper[4869]: E1001 15:13:11.548556 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e332dd7b45911e52db6ad9d474be312012028a9def590871b4d2a7a735c80ff0\": container with ID starting with e332dd7b45911e52db6ad9d474be312012028a9def590871b4d2a7a735c80ff0 not found: ID does not exist" containerID="e332dd7b45911e52db6ad9d474be312012028a9def590871b4d2a7a735c80ff0" Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.548611 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e332dd7b45911e52db6ad9d474be312012028a9def590871b4d2a7a735c80ff0"} err="failed to get container status \"e332dd7b45911e52db6ad9d474be312012028a9def590871b4d2a7a735c80ff0\": rpc error: code = NotFound desc = could not find container \"e332dd7b45911e52db6ad9d474be312012028a9def590871b4d2a7a735c80ff0\": container with ID starting with e332dd7b45911e52db6ad9d474be312012028a9def590871b4d2a7a735c80ff0 not found: ID does not exist" Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.548630 4869 scope.go:117] "RemoveContainer" 
containerID="c59f9083cd75c8bee85596c2a178cb931de40a37dd459433dd2c23d5b61d95f0" Oct 01 15:13:11 crc kubenswrapper[4869]: E1001 15:13:11.548900 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c59f9083cd75c8bee85596c2a178cb931de40a37dd459433dd2c23d5b61d95f0\": container with ID starting with c59f9083cd75c8bee85596c2a178cb931de40a37dd459433dd2c23d5b61d95f0 not found: ID does not exist" containerID="c59f9083cd75c8bee85596c2a178cb931de40a37dd459433dd2c23d5b61d95f0" Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.548924 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c59f9083cd75c8bee85596c2a178cb931de40a37dd459433dd2c23d5b61d95f0"} err="failed to get container status \"c59f9083cd75c8bee85596c2a178cb931de40a37dd459433dd2c23d5b61d95f0\": rpc error: code = NotFound desc = could not find container \"c59f9083cd75c8bee85596c2a178cb931de40a37dd459433dd2c23d5b61d95f0\": container with ID starting with c59f9083cd75c8bee85596c2a178cb931de40a37dd459433dd2c23d5b61d95f0 not found: ID does not exist" Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.548941 4869 scope.go:117] "RemoveContainer" containerID="c5f583b0b94050424796b550a344f0c6069c1608629fdd01ddfdb303add0d5f7" Oct 01 15:13:11 crc kubenswrapper[4869]: E1001 15:13:11.549207 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c5f583b0b94050424796b550a344f0c6069c1608629fdd01ddfdb303add0d5f7\": container with ID starting with c5f583b0b94050424796b550a344f0c6069c1608629fdd01ddfdb303add0d5f7 not found: ID does not exist" containerID="c5f583b0b94050424796b550a344f0c6069c1608629fdd01ddfdb303add0d5f7" Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.549231 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c5f583b0b94050424796b550a344f0c6069c1608629fdd01ddfdb303add0d5f7"} err="failed to get container status \"c5f583b0b94050424796b550a344f0c6069c1608629fdd01ddfdb303add0d5f7\": rpc error: code = NotFound desc = could not find container \"c5f583b0b94050424796b550a344f0c6069c1608629fdd01ddfdb303add0d5f7\": container with ID starting with c5f583b0b94050424796b550a344f0c6069c1608629fdd01ddfdb303add0d5f7 not found: ID does not exist" Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.549245 4869 scope.go:117] "RemoveContainer" containerID="ce48fdbad0049809bac4c2afd94d87133b935841c6a3805f8eacc26dd8527c62" Oct 01 15:13:11 crc kubenswrapper[4869]: E1001 15:13:11.549515 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ce48fdbad0049809bac4c2afd94d87133b935841c6a3805f8eacc26dd8527c62\": container with ID starting with ce48fdbad0049809bac4c2afd94d87133b935841c6a3805f8eacc26dd8527c62 not found: ID does not exist" containerID="ce48fdbad0049809bac4c2afd94d87133b935841c6a3805f8eacc26dd8527c62" Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.549574 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ce48fdbad0049809bac4c2afd94d87133b935841c6a3805f8eacc26dd8527c62"} err="failed to get container status \"ce48fdbad0049809bac4c2afd94d87133b935841c6a3805f8eacc26dd8527c62\": rpc error: code = NotFound desc = could not find container \"ce48fdbad0049809bac4c2afd94d87133b935841c6a3805f8eacc26dd8527c62\": container with ID starting with 
ce48fdbad0049809bac4c2afd94d87133b935841c6a3805f8eacc26dd8527c62 not found: ID does not exist" Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.549595 4869 scope.go:117] "RemoveContainer" containerID="992afa5f68050b8a1ec99aaf070b3016494713d2d0d51fef7bd1296ad29546fd" Oct 01 15:13:11 crc kubenswrapper[4869]: E1001 15:13:11.550022 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"992afa5f68050b8a1ec99aaf070b3016494713d2d0d51fef7bd1296ad29546fd\": container with ID starting with 992afa5f68050b8a1ec99aaf070b3016494713d2d0d51fef7bd1296ad29546fd not found: ID does not exist" containerID="992afa5f68050b8a1ec99aaf070b3016494713d2d0d51fef7bd1296ad29546fd" Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.550050 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"992afa5f68050b8a1ec99aaf070b3016494713d2d0d51fef7bd1296ad29546fd"} err="failed to get container status \"992afa5f68050b8a1ec99aaf070b3016494713d2d0d51fef7bd1296ad29546fd\": rpc error: code = NotFound desc = could not find container \"992afa5f68050b8a1ec99aaf070b3016494713d2d0d51fef7bd1296ad29546fd\": container with ID starting with 992afa5f68050b8a1ec99aaf070b3016494713d2d0d51fef7bd1296ad29546fd not found: ID does not exist" Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.550068 4869 scope.go:117] "RemoveContainer" containerID="b3afd39d18311bca00ad07f623d9d405a8cd60f6d29fc32c2bf6509b450160dd" Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.550331 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b3afd39d18311bca00ad07f623d9d405a8cd60f6d29fc32c2bf6509b450160dd"} err="failed to get container status \"b3afd39d18311bca00ad07f623d9d405a8cd60f6d29fc32c2bf6509b450160dd\": rpc error: code = NotFound desc = could not find container \"b3afd39d18311bca00ad07f623d9d405a8cd60f6d29fc32c2bf6509b450160dd\": container with ID starting with b3afd39d18311bca00ad07f623d9d405a8cd60f6d29fc32c2bf6509b450160dd not found: ID does not exist" Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.550366 4869 scope.go:117] "RemoveContainer" containerID="59471b38e240e2b9a2ccfc1dee734c077c4f09fa10e0b55964d0fe73cd8af5a1" Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.550617 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"59471b38e240e2b9a2ccfc1dee734c077c4f09fa10e0b55964d0fe73cd8af5a1"} err="failed to get container status \"59471b38e240e2b9a2ccfc1dee734c077c4f09fa10e0b55964d0fe73cd8af5a1\": rpc error: code = NotFound desc = could not find container \"59471b38e240e2b9a2ccfc1dee734c077c4f09fa10e0b55964d0fe73cd8af5a1\": container with ID starting with 59471b38e240e2b9a2ccfc1dee734c077c4f09fa10e0b55964d0fe73cd8af5a1 not found: ID does not exist" Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.550638 4869 scope.go:117] "RemoveContainer" containerID="7afd925f4b10ef56213c985a7b92c49d60a62f9be8ade831c42641100327ea40" Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.550982 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7afd925f4b10ef56213c985a7b92c49d60a62f9be8ade831c42641100327ea40"} err="failed to get container status \"7afd925f4b10ef56213c985a7b92c49d60a62f9be8ade831c42641100327ea40\": rpc error: code = NotFound desc = could not find container \"7afd925f4b10ef56213c985a7b92c49d60a62f9be8ade831c42641100327ea40\": container with ID starting with 
7afd925f4b10ef56213c985a7b92c49d60a62f9be8ade831c42641100327ea40 not found: ID does not exist" Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.551007 4869 scope.go:117] "RemoveContainer" containerID="7811ddaf0b31f53a6bc23e82dfa3a4bb5f9fa408b1f2e22dff3f71e9de8b00d2" Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.551412 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7811ddaf0b31f53a6bc23e82dfa3a4bb5f9fa408b1f2e22dff3f71e9de8b00d2"} err="failed to get container status \"7811ddaf0b31f53a6bc23e82dfa3a4bb5f9fa408b1f2e22dff3f71e9de8b00d2\": rpc error: code = NotFound desc = could not find container \"7811ddaf0b31f53a6bc23e82dfa3a4bb5f9fa408b1f2e22dff3f71e9de8b00d2\": container with ID starting with 7811ddaf0b31f53a6bc23e82dfa3a4bb5f9fa408b1f2e22dff3f71e9de8b00d2 not found: ID does not exist" Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.551432 4869 scope.go:117] "RemoveContainer" containerID="e332dd7b45911e52db6ad9d474be312012028a9def590871b4d2a7a735c80ff0" Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.551967 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e332dd7b45911e52db6ad9d474be312012028a9def590871b4d2a7a735c80ff0"} err="failed to get container status \"e332dd7b45911e52db6ad9d474be312012028a9def590871b4d2a7a735c80ff0\": rpc error: code = NotFound desc = could not find container \"e332dd7b45911e52db6ad9d474be312012028a9def590871b4d2a7a735c80ff0\": container with ID starting with e332dd7b45911e52db6ad9d474be312012028a9def590871b4d2a7a735c80ff0 not found: ID does not exist" Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.552035 4869 scope.go:117] "RemoveContainer" containerID="c59f9083cd75c8bee85596c2a178cb931de40a37dd459433dd2c23d5b61d95f0" Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.552515 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c59f9083cd75c8bee85596c2a178cb931de40a37dd459433dd2c23d5b61d95f0"} err="failed to get container status \"c59f9083cd75c8bee85596c2a178cb931de40a37dd459433dd2c23d5b61d95f0\": rpc error: code = NotFound desc = could not find container \"c59f9083cd75c8bee85596c2a178cb931de40a37dd459433dd2c23d5b61d95f0\": container with ID starting with c59f9083cd75c8bee85596c2a178cb931de40a37dd459433dd2c23d5b61d95f0 not found: ID does not exist" Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.552540 4869 scope.go:117] "RemoveContainer" containerID="c5f583b0b94050424796b550a344f0c6069c1608629fdd01ddfdb303add0d5f7" Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.552832 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c5f583b0b94050424796b550a344f0c6069c1608629fdd01ddfdb303add0d5f7"} err="failed to get container status \"c5f583b0b94050424796b550a344f0c6069c1608629fdd01ddfdb303add0d5f7\": rpc error: code = NotFound desc = could not find container \"c5f583b0b94050424796b550a344f0c6069c1608629fdd01ddfdb303add0d5f7\": container with ID starting with c5f583b0b94050424796b550a344f0c6069c1608629fdd01ddfdb303add0d5f7 not found: ID does not exist" Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.552864 4869 scope.go:117] "RemoveContainer" containerID="ce48fdbad0049809bac4c2afd94d87133b935841c6a3805f8eacc26dd8527c62" Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.553189 4869 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"ce48fdbad0049809bac4c2afd94d87133b935841c6a3805f8eacc26dd8527c62"} err="failed to get container status \"ce48fdbad0049809bac4c2afd94d87133b935841c6a3805f8eacc26dd8527c62\": rpc error: code = NotFound desc = could not find container \"ce48fdbad0049809bac4c2afd94d87133b935841c6a3805f8eacc26dd8527c62\": container with ID starting with ce48fdbad0049809bac4c2afd94d87133b935841c6a3805f8eacc26dd8527c62 not found: ID does not exist" Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.553222 4869 scope.go:117] "RemoveContainer" containerID="992afa5f68050b8a1ec99aaf070b3016494713d2d0d51fef7bd1296ad29546fd" Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.553596 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"992afa5f68050b8a1ec99aaf070b3016494713d2d0d51fef7bd1296ad29546fd"} err="failed to get container status \"992afa5f68050b8a1ec99aaf070b3016494713d2d0d51fef7bd1296ad29546fd\": rpc error: code = NotFound desc = could not find container \"992afa5f68050b8a1ec99aaf070b3016494713d2d0d51fef7bd1296ad29546fd\": container with ID starting with 992afa5f68050b8a1ec99aaf070b3016494713d2d0d51fef7bd1296ad29546fd not found: ID does not exist" Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.553628 4869 scope.go:117] "RemoveContainer" containerID="b3afd39d18311bca00ad07f623d9d405a8cd60f6d29fc32c2bf6509b450160dd" Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.554005 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b3afd39d18311bca00ad07f623d9d405a8cd60f6d29fc32c2bf6509b450160dd"} err="failed to get container status \"b3afd39d18311bca00ad07f623d9d405a8cd60f6d29fc32c2bf6509b450160dd\": rpc error: code = NotFound desc = could not find container \"b3afd39d18311bca00ad07f623d9d405a8cd60f6d29fc32c2bf6509b450160dd\": container with ID starting with b3afd39d18311bca00ad07f623d9d405a8cd60f6d29fc32c2bf6509b450160dd not found: ID does not exist" Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.554027 4869 scope.go:117] "RemoveContainer" containerID="59471b38e240e2b9a2ccfc1dee734c077c4f09fa10e0b55964d0fe73cd8af5a1" Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.554432 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"59471b38e240e2b9a2ccfc1dee734c077c4f09fa10e0b55964d0fe73cd8af5a1"} err="failed to get container status \"59471b38e240e2b9a2ccfc1dee734c077c4f09fa10e0b55964d0fe73cd8af5a1\": rpc error: code = NotFound desc = could not find container \"59471b38e240e2b9a2ccfc1dee734c077c4f09fa10e0b55964d0fe73cd8af5a1\": container with ID starting with 59471b38e240e2b9a2ccfc1dee734c077c4f09fa10e0b55964d0fe73cd8af5a1 not found: ID does not exist" Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.554453 4869 scope.go:117] "RemoveContainer" containerID="7afd925f4b10ef56213c985a7b92c49d60a62f9be8ade831c42641100327ea40" Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.555120 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7afd925f4b10ef56213c985a7b92c49d60a62f9be8ade831c42641100327ea40"} err="failed to get container status \"7afd925f4b10ef56213c985a7b92c49d60a62f9be8ade831c42641100327ea40\": rpc error: code = NotFound desc = could not find container \"7afd925f4b10ef56213c985a7b92c49d60a62f9be8ade831c42641100327ea40\": container with ID starting with 7afd925f4b10ef56213c985a7b92c49d60a62f9be8ade831c42641100327ea40 not found: ID does not exist" Oct 
01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.555145 4869 scope.go:117] "RemoveContainer" containerID="7811ddaf0b31f53a6bc23e82dfa3a4bb5f9fa408b1f2e22dff3f71e9de8b00d2" Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.555406 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7811ddaf0b31f53a6bc23e82dfa3a4bb5f9fa408b1f2e22dff3f71e9de8b00d2"} err="failed to get container status \"7811ddaf0b31f53a6bc23e82dfa3a4bb5f9fa408b1f2e22dff3f71e9de8b00d2\": rpc error: code = NotFound desc = could not find container \"7811ddaf0b31f53a6bc23e82dfa3a4bb5f9fa408b1f2e22dff3f71e9de8b00d2\": container with ID starting with 7811ddaf0b31f53a6bc23e82dfa3a4bb5f9fa408b1f2e22dff3f71e9de8b00d2 not found: ID does not exist" Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.555429 4869 scope.go:117] "RemoveContainer" containerID="e332dd7b45911e52db6ad9d474be312012028a9def590871b4d2a7a735c80ff0" Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.555854 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e332dd7b45911e52db6ad9d474be312012028a9def590871b4d2a7a735c80ff0"} err="failed to get container status \"e332dd7b45911e52db6ad9d474be312012028a9def590871b4d2a7a735c80ff0\": rpc error: code = NotFound desc = could not find container \"e332dd7b45911e52db6ad9d474be312012028a9def590871b4d2a7a735c80ff0\": container with ID starting with e332dd7b45911e52db6ad9d474be312012028a9def590871b4d2a7a735c80ff0 not found: ID does not exist" Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.555884 4869 scope.go:117] "RemoveContainer" containerID="c59f9083cd75c8bee85596c2a178cb931de40a37dd459433dd2c23d5b61d95f0" Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.556185 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c59f9083cd75c8bee85596c2a178cb931de40a37dd459433dd2c23d5b61d95f0"} err="failed to get container status \"c59f9083cd75c8bee85596c2a178cb931de40a37dd459433dd2c23d5b61d95f0\": rpc error: code = NotFound desc = could not find container \"c59f9083cd75c8bee85596c2a178cb931de40a37dd459433dd2c23d5b61d95f0\": container with ID starting with c59f9083cd75c8bee85596c2a178cb931de40a37dd459433dd2c23d5b61d95f0 not found: ID does not exist" Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.556210 4869 scope.go:117] "RemoveContainer" containerID="c5f583b0b94050424796b550a344f0c6069c1608629fdd01ddfdb303add0d5f7" Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.556532 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c5f583b0b94050424796b550a344f0c6069c1608629fdd01ddfdb303add0d5f7"} err="failed to get container status \"c5f583b0b94050424796b550a344f0c6069c1608629fdd01ddfdb303add0d5f7\": rpc error: code = NotFound desc = could not find container \"c5f583b0b94050424796b550a344f0c6069c1608629fdd01ddfdb303add0d5f7\": container with ID starting with c5f583b0b94050424796b550a344f0c6069c1608629fdd01ddfdb303add0d5f7 not found: ID does not exist" Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.556554 4869 scope.go:117] "RemoveContainer" containerID="ce48fdbad0049809bac4c2afd94d87133b935841c6a3805f8eacc26dd8527c62" Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.557010 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ce48fdbad0049809bac4c2afd94d87133b935841c6a3805f8eacc26dd8527c62"} err="failed to get container status 
\"ce48fdbad0049809bac4c2afd94d87133b935841c6a3805f8eacc26dd8527c62\": rpc error: code = NotFound desc = could not find container \"ce48fdbad0049809bac4c2afd94d87133b935841c6a3805f8eacc26dd8527c62\": container with ID starting with ce48fdbad0049809bac4c2afd94d87133b935841c6a3805f8eacc26dd8527c62 not found: ID does not exist" Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.557051 4869 scope.go:117] "RemoveContainer" containerID="992afa5f68050b8a1ec99aaf070b3016494713d2d0d51fef7bd1296ad29546fd" Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.557485 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"992afa5f68050b8a1ec99aaf070b3016494713d2d0d51fef7bd1296ad29546fd"} err="failed to get container status \"992afa5f68050b8a1ec99aaf070b3016494713d2d0d51fef7bd1296ad29546fd\": rpc error: code = NotFound desc = could not find container \"992afa5f68050b8a1ec99aaf070b3016494713d2d0d51fef7bd1296ad29546fd\": container with ID starting with 992afa5f68050b8a1ec99aaf070b3016494713d2d0d51fef7bd1296ad29546fd not found: ID does not exist" Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.557511 4869 scope.go:117] "RemoveContainer" containerID="b3afd39d18311bca00ad07f623d9d405a8cd60f6d29fc32c2bf6509b450160dd" Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.557874 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b3afd39d18311bca00ad07f623d9d405a8cd60f6d29fc32c2bf6509b450160dd"} err="failed to get container status \"b3afd39d18311bca00ad07f623d9d405a8cd60f6d29fc32c2bf6509b450160dd\": rpc error: code = NotFound desc = could not find container \"b3afd39d18311bca00ad07f623d9d405a8cd60f6d29fc32c2bf6509b450160dd\": container with ID starting with b3afd39d18311bca00ad07f623d9d405a8cd60f6d29fc32c2bf6509b450160dd not found: ID does not exist" Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.557918 4869 scope.go:117] "RemoveContainer" containerID="59471b38e240e2b9a2ccfc1dee734c077c4f09fa10e0b55964d0fe73cd8af5a1" Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.558242 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"59471b38e240e2b9a2ccfc1dee734c077c4f09fa10e0b55964d0fe73cd8af5a1"} err="failed to get container status \"59471b38e240e2b9a2ccfc1dee734c077c4f09fa10e0b55964d0fe73cd8af5a1\": rpc error: code = NotFound desc = could not find container \"59471b38e240e2b9a2ccfc1dee734c077c4f09fa10e0b55964d0fe73cd8af5a1\": container with ID starting with 59471b38e240e2b9a2ccfc1dee734c077c4f09fa10e0b55964d0fe73cd8af5a1 not found: ID does not exist" Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.558282 4869 scope.go:117] "RemoveContainer" containerID="7afd925f4b10ef56213c985a7b92c49d60a62f9be8ade831c42641100327ea40" Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.558615 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7afd925f4b10ef56213c985a7b92c49d60a62f9be8ade831c42641100327ea40"} err="failed to get container status \"7afd925f4b10ef56213c985a7b92c49d60a62f9be8ade831c42641100327ea40\": rpc error: code = NotFound desc = could not find container \"7afd925f4b10ef56213c985a7b92c49d60a62f9be8ade831c42641100327ea40\": container with ID starting with 7afd925f4b10ef56213c985a7b92c49d60a62f9be8ade831c42641100327ea40 not found: ID does not exist" Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.558640 4869 scope.go:117] "RemoveContainer" 
containerID="7811ddaf0b31f53a6bc23e82dfa3a4bb5f9fa408b1f2e22dff3f71e9de8b00d2" Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.558894 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7811ddaf0b31f53a6bc23e82dfa3a4bb5f9fa408b1f2e22dff3f71e9de8b00d2"} err="failed to get container status \"7811ddaf0b31f53a6bc23e82dfa3a4bb5f9fa408b1f2e22dff3f71e9de8b00d2\": rpc error: code = NotFound desc = could not find container \"7811ddaf0b31f53a6bc23e82dfa3a4bb5f9fa408b1f2e22dff3f71e9de8b00d2\": container with ID starting with 7811ddaf0b31f53a6bc23e82dfa3a4bb5f9fa408b1f2e22dff3f71e9de8b00d2 not found: ID does not exist" Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.558914 4869 scope.go:117] "RemoveContainer" containerID="e332dd7b45911e52db6ad9d474be312012028a9def590871b4d2a7a735c80ff0" Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.559818 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e332dd7b45911e52db6ad9d474be312012028a9def590871b4d2a7a735c80ff0"} err="failed to get container status \"e332dd7b45911e52db6ad9d474be312012028a9def590871b4d2a7a735c80ff0\": rpc error: code = NotFound desc = could not find container \"e332dd7b45911e52db6ad9d474be312012028a9def590871b4d2a7a735c80ff0\": container with ID starting with e332dd7b45911e52db6ad9d474be312012028a9def590871b4d2a7a735c80ff0 not found: ID does not exist" Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.559857 4869 scope.go:117] "RemoveContainer" containerID="c59f9083cd75c8bee85596c2a178cb931de40a37dd459433dd2c23d5b61d95f0" Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.560157 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c59f9083cd75c8bee85596c2a178cb931de40a37dd459433dd2c23d5b61d95f0"} err="failed to get container status \"c59f9083cd75c8bee85596c2a178cb931de40a37dd459433dd2c23d5b61d95f0\": rpc error: code = NotFound desc = could not find container \"c59f9083cd75c8bee85596c2a178cb931de40a37dd459433dd2c23d5b61d95f0\": container with ID starting with c59f9083cd75c8bee85596c2a178cb931de40a37dd459433dd2c23d5b61d95f0 not found: ID does not exist" Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.560194 4869 scope.go:117] "RemoveContainer" containerID="c5f583b0b94050424796b550a344f0c6069c1608629fdd01ddfdb303add0d5f7" Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.560468 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c5f583b0b94050424796b550a344f0c6069c1608629fdd01ddfdb303add0d5f7"} err="failed to get container status \"c5f583b0b94050424796b550a344f0c6069c1608629fdd01ddfdb303add0d5f7\": rpc error: code = NotFound desc = could not find container \"c5f583b0b94050424796b550a344f0c6069c1608629fdd01ddfdb303add0d5f7\": container with ID starting with c5f583b0b94050424796b550a344f0c6069c1608629fdd01ddfdb303add0d5f7 not found: ID does not exist" Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.560488 4869 scope.go:117] "RemoveContainer" containerID="ce48fdbad0049809bac4c2afd94d87133b935841c6a3805f8eacc26dd8527c62" Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.560949 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ce48fdbad0049809bac4c2afd94d87133b935841c6a3805f8eacc26dd8527c62"} err="failed to get container status \"ce48fdbad0049809bac4c2afd94d87133b935841c6a3805f8eacc26dd8527c62\": rpc error: code = NotFound desc = could not find 
container \"ce48fdbad0049809bac4c2afd94d87133b935841c6a3805f8eacc26dd8527c62\": container with ID starting with ce48fdbad0049809bac4c2afd94d87133b935841c6a3805f8eacc26dd8527c62 not found: ID does not exist" Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.560971 4869 scope.go:117] "RemoveContainer" containerID="992afa5f68050b8a1ec99aaf070b3016494713d2d0d51fef7bd1296ad29546fd" Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.561349 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"992afa5f68050b8a1ec99aaf070b3016494713d2d0d51fef7bd1296ad29546fd"} err="failed to get container status \"992afa5f68050b8a1ec99aaf070b3016494713d2d0d51fef7bd1296ad29546fd\": rpc error: code = NotFound desc = could not find container \"992afa5f68050b8a1ec99aaf070b3016494713d2d0d51fef7bd1296ad29546fd\": container with ID starting with 992afa5f68050b8a1ec99aaf070b3016494713d2d0d51fef7bd1296ad29546fd not found: ID does not exist" Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.561372 4869 scope.go:117] "RemoveContainer" containerID="b3afd39d18311bca00ad07f623d9d405a8cd60f6d29fc32c2bf6509b450160dd" Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.561613 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b3afd39d18311bca00ad07f623d9d405a8cd60f6d29fc32c2bf6509b450160dd"} err="failed to get container status \"b3afd39d18311bca00ad07f623d9d405a8cd60f6d29fc32c2bf6509b450160dd\": rpc error: code = NotFound desc = could not find container \"b3afd39d18311bca00ad07f623d9d405a8cd60f6d29fc32c2bf6509b450160dd\": container with ID starting with b3afd39d18311bca00ad07f623d9d405a8cd60f6d29fc32c2bf6509b450160dd not found: ID does not exist" Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.561629 4869 scope.go:117] "RemoveContainer" containerID="59471b38e240e2b9a2ccfc1dee734c077c4f09fa10e0b55964d0fe73cd8af5a1" Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.561860 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"59471b38e240e2b9a2ccfc1dee734c077c4f09fa10e0b55964d0fe73cd8af5a1"} err="failed to get container status \"59471b38e240e2b9a2ccfc1dee734c077c4f09fa10e0b55964d0fe73cd8af5a1\": rpc error: code = NotFound desc = could not find container \"59471b38e240e2b9a2ccfc1dee734c077c4f09fa10e0b55964d0fe73cd8af5a1\": container with ID starting with 59471b38e240e2b9a2ccfc1dee734c077c4f09fa10e0b55964d0fe73cd8af5a1 not found: ID does not exist" Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.561882 4869 scope.go:117] "RemoveContainer" containerID="7afd925f4b10ef56213c985a7b92c49d60a62f9be8ade831c42641100327ea40" Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.562142 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7afd925f4b10ef56213c985a7b92c49d60a62f9be8ade831c42641100327ea40"} err="failed to get container status \"7afd925f4b10ef56213c985a7b92c49d60a62f9be8ade831c42641100327ea40\": rpc error: code = NotFound desc = could not find container \"7afd925f4b10ef56213c985a7b92c49d60a62f9be8ade831c42641100327ea40\": container with ID starting with 7afd925f4b10ef56213c985a7b92c49d60a62f9be8ade831c42641100327ea40 not found: ID does not exist" Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.562163 4869 scope.go:117] "RemoveContainer" containerID="7811ddaf0b31f53a6bc23e82dfa3a4bb5f9fa408b1f2e22dff3f71e9de8b00d2" Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.562526 4869 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7811ddaf0b31f53a6bc23e82dfa3a4bb5f9fa408b1f2e22dff3f71e9de8b00d2"} err="failed to get container status \"7811ddaf0b31f53a6bc23e82dfa3a4bb5f9fa408b1f2e22dff3f71e9de8b00d2\": rpc error: code = NotFound desc = could not find container \"7811ddaf0b31f53a6bc23e82dfa3a4bb5f9fa408b1f2e22dff3f71e9de8b00d2\": container with ID starting with 7811ddaf0b31f53a6bc23e82dfa3a4bb5f9fa408b1f2e22dff3f71e9de8b00d2 not found: ID does not exist" Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.562547 4869 scope.go:117] "RemoveContainer" containerID="e332dd7b45911e52db6ad9d474be312012028a9def590871b4d2a7a735c80ff0" Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.562733 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e332dd7b45911e52db6ad9d474be312012028a9def590871b4d2a7a735c80ff0"} err="failed to get container status \"e332dd7b45911e52db6ad9d474be312012028a9def590871b4d2a7a735c80ff0\": rpc error: code = NotFound desc = could not find container \"e332dd7b45911e52db6ad9d474be312012028a9def590871b4d2a7a735c80ff0\": container with ID starting with e332dd7b45911e52db6ad9d474be312012028a9def590871b4d2a7a735c80ff0 not found: ID does not exist" Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.562754 4869 scope.go:117] "RemoveContainer" containerID="c59f9083cd75c8bee85596c2a178cb931de40a37dd459433dd2c23d5b61d95f0" Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.563111 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c59f9083cd75c8bee85596c2a178cb931de40a37dd459433dd2c23d5b61d95f0"} err="failed to get container status \"c59f9083cd75c8bee85596c2a178cb931de40a37dd459433dd2c23d5b61d95f0\": rpc error: code = NotFound desc = could not find container \"c59f9083cd75c8bee85596c2a178cb931de40a37dd459433dd2c23d5b61d95f0\": container with ID starting with c59f9083cd75c8bee85596c2a178cb931de40a37dd459433dd2c23d5b61d95f0 not found: ID does not exist" Oct 01 15:13:11 crc kubenswrapper[4869]: I1001 15:13:11.588693 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ebbefc55-bef9-4a03-a065-321bff3a75b4" path="/var/lib/kubelet/pods/ebbefc55-bef9-4a03-a065-321bff3a75b4/volumes" Oct 01 15:13:12 crc kubenswrapper[4869]: I1001 15:13:12.274729 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-j98s2_69635c7a-0025-4ea2-a1b6-fc7776c2be11/kube-multus/0.log" Oct 01 15:13:12 crc kubenswrapper[4869]: I1001 15:13:12.274864 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-j98s2" event={"ID":"69635c7a-0025-4ea2-a1b6-fc7776c2be11","Type":"ContainerStarted","Data":"0d07a07f8e16b3518653ee7182c156e179b8db3fe7a268474bcdba8514fee23a"} Oct 01 15:13:12 crc kubenswrapper[4869]: I1001 15:13:12.281193 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hwptl" event={"ID":"e59dd056-ee8a-4b8b-af8a-f2d17bb64328","Type":"ContainerStarted","Data":"81cca54f46970efd5ff5809429eeec4a728bf655c2c589faf78085f676810023"} Oct 01 15:13:12 crc kubenswrapper[4869]: I1001 15:13:12.281315 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hwptl" event={"ID":"e59dd056-ee8a-4b8b-af8a-f2d17bb64328","Type":"ContainerStarted","Data":"8ea5a9bd59e2417a6e26e303d1ff61bac358fdc4e5005aea62d1eba001891d35"} Oct 01 15:13:12 crc kubenswrapper[4869]: I1001 15:13:12.281349 4869 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hwptl" event={"ID":"e59dd056-ee8a-4b8b-af8a-f2d17bb64328","Type":"ContainerStarted","Data":"1db0ca793c563ce6f124fdfd193d25e13a934035e33adf98d1ea63276b54230e"} Oct 01 15:13:12 crc kubenswrapper[4869]: I1001 15:13:12.281378 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hwptl" event={"ID":"e59dd056-ee8a-4b8b-af8a-f2d17bb64328","Type":"ContainerStarted","Data":"26dadc687f9abb9b4489f275ca81e765b1d71fd71831f91daa4cb5ed61fb13fc"} Oct 01 15:13:12 crc kubenswrapper[4869]: I1001 15:13:12.281403 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hwptl" event={"ID":"e59dd056-ee8a-4b8b-af8a-f2d17bb64328","Type":"ContainerStarted","Data":"39be158e70b090ef2e8396643be95077cbc48b2b87fbf43115d42e1b843d118b"} Oct 01 15:13:12 crc kubenswrapper[4869]: I1001 15:13:12.281428 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hwptl" event={"ID":"e59dd056-ee8a-4b8b-af8a-f2d17bb64328","Type":"ContainerStarted","Data":"449b8774a0ae3ec1fcc79ad0c73b8df3590f1e66013af653585824a84986f56d"} Oct 01 15:13:13 crc kubenswrapper[4869]: I1001 15:13:13.354809 4869 patch_prober.go:28] interesting pod/machine-config-daemon-c86m8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 15:13:13 crc kubenswrapper[4869]: I1001 15:13:13.355216 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 15:13:14 crc kubenswrapper[4869]: I1001 15:13:14.303944 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hwptl" event={"ID":"e59dd056-ee8a-4b8b-af8a-f2d17bb64328","Type":"ContainerStarted","Data":"9ce3064f055da9ee56385a8d87a85a17fd4102903086099fa6df3ad98e9f5126"} Oct 01 15:13:17 crc kubenswrapper[4869]: I1001 15:13:17.331636 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hwptl" event={"ID":"e59dd056-ee8a-4b8b-af8a-f2d17bb64328","Type":"ContainerStarted","Data":"1ff3d000c4a70608d9fb534c02ed2cd5fba7fd74811d3dca4c2505c0a82b1d35"} Oct 01 15:13:17 crc kubenswrapper[4869]: I1001 15:13:17.333181 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-hwptl" Oct 01 15:13:17 crc kubenswrapper[4869]: I1001 15:13:17.333204 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-hwptl" Oct 01 15:13:17 crc kubenswrapper[4869]: I1001 15:13:17.333245 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-hwptl" Oct 01 15:13:17 crc kubenswrapper[4869]: I1001 15:13:17.373213 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-hwptl" Oct 01 15:13:17 crc kubenswrapper[4869]: I1001 15:13:17.373359 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-hwptl" Oct 01 15:13:17 crc 
kubenswrapper[4869]: I1001 15:13:17.380343 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-hwptl" podStartSLOduration=7.380316916 podStartE2EDuration="7.380316916s" podCreationTimestamp="2025-10-01 15:13:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:13:17.374223052 +0000 UTC m=+506.521066238" watchObservedRunningTime="2025-10-01 15:13:17.380316916 +0000 UTC m=+506.527160072" Oct 01 15:13:41 crc kubenswrapper[4869]: I1001 15:13:41.050427 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-hwptl" Oct 01 15:13:43 crc kubenswrapper[4869]: I1001 15:13:43.354586 4869 patch_prober.go:28] interesting pod/machine-config-daemon-c86m8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 15:13:43 crc kubenswrapper[4869]: I1001 15:13:43.354919 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 15:13:52 crc kubenswrapper[4869]: I1001 15:13:52.791336 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcwzr4g"] Oct 01 15:13:52 crc kubenswrapper[4869]: I1001 15:13:52.792857 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcwzr4g" Oct 01 15:13:52 crc kubenswrapper[4869]: I1001 15:13:52.795224 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Oct 01 15:13:52 crc kubenswrapper[4869]: I1001 15:13:52.808750 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcwzr4g"] Oct 01 15:13:52 crc kubenswrapper[4869]: I1001 15:13:52.942922 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/56c96162-675d-4e1a-9945-a0a0b16de0d2-bundle\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcwzr4g\" (UID: \"56c96162-675d-4e1a-9945-a0a0b16de0d2\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcwzr4g" Oct 01 15:13:52 crc kubenswrapper[4869]: I1001 15:13:52.942992 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/56c96162-675d-4e1a-9945-a0a0b16de0d2-util\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcwzr4g\" (UID: \"56c96162-675d-4e1a-9945-a0a0b16de0d2\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcwzr4g" Oct 01 15:13:52 crc kubenswrapper[4869]: I1001 15:13:52.943184 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jzmwx\" (UniqueName: \"kubernetes.io/projected/56c96162-675d-4e1a-9945-a0a0b16de0d2-kube-api-access-jzmwx\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcwzr4g\" (UID: \"56c96162-675d-4e1a-9945-a0a0b16de0d2\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcwzr4g" Oct 01 15:13:53 crc kubenswrapper[4869]: I1001 15:13:53.044715 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/56c96162-675d-4e1a-9945-a0a0b16de0d2-bundle\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcwzr4g\" (UID: \"56c96162-675d-4e1a-9945-a0a0b16de0d2\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcwzr4g" Oct 01 15:13:53 crc kubenswrapper[4869]: I1001 15:13:53.044785 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/56c96162-675d-4e1a-9945-a0a0b16de0d2-util\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcwzr4g\" (UID: \"56c96162-675d-4e1a-9945-a0a0b16de0d2\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcwzr4g" Oct 01 15:13:53 crc kubenswrapper[4869]: I1001 15:13:53.044851 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jzmwx\" (UniqueName: \"kubernetes.io/projected/56c96162-675d-4e1a-9945-a0a0b16de0d2-kube-api-access-jzmwx\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcwzr4g\" (UID: \"56c96162-675d-4e1a-9945-a0a0b16de0d2\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcwzr4g" Oct 01 15:13:53 crc kubenswrapper[4869]: I1001 15:13:53.045670 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/56c96162-675d-4e1a-9945-a0a0b16de0d2-bundle\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcwzr4g\" (UID: \"56c96162-675d-4e1a-9945-a0a0b16de0d2\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcwzr4g" Oct 01 15:13:53 crc kubenswrapper[4869]: I1001 15:13:53.045732 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/56c96162-675d-4e1a-9945-a0a0b16de0d2-util\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcwzr4g\" (UID: \"56c96162-675d-4e1a-9945-a0a0b16de0d2\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcwzr4g" Oct 01 15:13:53 crc kubenswrapper[4869]: I1001 15:13:53.080339 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jzmwx\" (UniqueName: \"kubernetes.io/projected/56c96162-675d-4e1a-9945-a0a0b16de0d2-kube-api-access-jzmwx\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcwzr4g\" (UID: \"56c96162-675d-4e1a-9945-a0a0b16de0d2\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcwzr4g" Oct 01 15:13:53 crc kubenswrapper[4869]: I1001 15:13:53.106463 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcwzr4g" Oct 01 15:13:53 crc kubenswrapper[4869]: I1001 15:13:53.626563 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcwzr4g"] Oct 01 15:13:53 crc kubenswrapper[4869]: W1001 15:13:53.634664 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod56c96162_675d_4e1a_9945_a0a0b16de0d2.slice/crio-629ac435051ab319fd6de82a4f93a32a4ad298b75ff1fc7df7eb698e9e05988b WatchSource:0}: Error finding container 629ac435051ab319fd6de82a4f93a32a4ad298b75ff1fc7df7eb698e9e05988b: Status 404 returned error can't find the container with id 629ac435051ab319fd6de82a4f93a32a4ad298b75ff1fc7df7eb698e9e05988b Oct 01 15:13:54 crc kubenswrapper[4869]: I1001 15:13:54.587408 4869 generic.go:334] "Generic (PLEG): container finished" podID="56c96162-675d-4e1a-9945-a0a0b16de0d2" containerID="3ec96cef07301e079b13bb975e52c4a1d069cdf7240a83f33d59422aba68fbc6" exitCode=0 Oct 01 15:13:54 crc kubenswrapper[4869]: I1001 15:13:54.587521 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcwzr4g" event={"ID":"56c96162-675d-4e1a-9945-a0a0b16de0d2","Type":"ContainerDied","Data":"3ec96cef07301e079b13bb975e52c4a1d069cdf7240a83f33d59422aba68fbc6"} Oct 01 15:13:54 crc kubenswrapper[4869]: I1001 15:13:54.587954 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcwzr4g" event={"ID":"56c96162-675d-4e1a-9945-a0a0b16de0d2","Type":"ContainerStarted","Data":"629ac435051ab319fd6de82a4f93a32a4ad298b75ff1fc7df7eb698e9e05988b"} Oct 01 15:13:56 crc kubenswrapper[4869]: I1001 15:13:56.608466 4869 generic.go:334] "Generic (PLEG): container finished" podID="56c96162-675d-4e1a-9945-a0a0b16de0d2" containerID="7440ca1f19f87deb32a16677342ffda19ba13d5a638d545c79ab2984bb0a2604" exitCode=0 Oct 01 15:13:56 crc kubenswrapper[4869]: I1001 15:13:56.608535 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcwzr4g" event={"ID":"56c96162-675d-4e1a-9945-a0a0b16de0d2","Type":"ContainerDied","Data":"7440ca1f19f87deb32a16677342ffda19ba13d5a638d545c79ab2984bb0a2604"} Oct 01 15:13:57 crc kubenswrapper[4869]: I1001 15:13:57.632217 4869 generic.go:334] "Generic (PLEG): container finished" podID="56c96162-675d-4e1a-9945-a0a0b16de0d2" containerID="8251fb3198517a377d465ebda53b1cba3644d7475f1a59b3c278ac18a67c2726" exitCode=0 Oct 01 15:13:57 crc kubenswrapper[4869]: I1001 15:13:57.632305 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcwzr4g" event={"ID":"56c96162-675d-4e1a-9945-a0a0b16de0d2","Type":"ContainerDied","Data":"8251fb3198517a377d465ebda53b1cba3644d7475f1a59b3c278ac18a67c2726"} Oct 01 15:13:58 crc kubenswrapper[4869]: I1001 15:13:58.978945 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcwzr4g" Oct 01 15:13:59 crc kubenswrapper[4869]: I1001 15:13:59.144354 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/56c96162-675d-4e1a-9945-a0a0b16de0d2-bundle\") pod \"56c96162-675d-4e1a-9945-a0a0b16de0d2\" (UID: \"56c96162-675d-4e1a-9945-a0a0b16de0d2\") " Oct 01 15:13:59 crc kubenswrapper[4869]: I1001 15:13:59.144425 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jzmwx\" (UniqueName: \"kubernetes.io/projected/56c96162-675d-4e1a-9945-a0a0b16de0d2-kube-api-access-jzmwx\") pod \"56c96162-675d-4e1a-9945-a0a0b16de0d2\" (UID: \"56c96162-675d-4e1a-9945-a0a0b16de0d2\") " Oct 01 15:13:59 crc kubenswrapper[4869]: I1001 15:13:59.144512 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/56c96162-675d-4e1a-9945-a0a0b16de0d2-util\") pod \"56c96162-675d-4e1a-9945-a0a0b16de0d2\" (UID: \"56c96162-675d-4e1a-9945-a0a0b16de0d2\") " Oct 01 15:13:59 crc kubenswrapper[4869]: I1001 15:13:59.146072 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/56c96162-675d-4e1a-9945-a0a0b16de0d2-bundle" (OuterVolumeSpecName: "bundle") pod "56c96162-675d-4e1a-9945-a0a0b16de0d2" (UID: "56c96162-675d-4e1a-9945-a0a0b16de0d2"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 15:13:59 crc kubenswrapper[4869]: I1001 15:13:59.154358 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/56c96162-675d-4e1a-9945-a0a0b16de0d2-kube-api-access-jzmwx" (OuterVolumeSpecName: "kube-api-access-jzmwx") pod "56c96162-675d-4e1a-9945-a0a0b16de0d2" (UID: "56c96162-675d-4e1a-9945-a0a0b16de0d2"). InnerVolumeSpecName "kube-api-access-jzmwx". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:13:59 crc kubenswrapper[4869]: I1001 15:13:59.165468 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/56c96162-675d-4e1a-9945-a0a0b16de0d2-util" (OuterVolumeSpecName: "util") pod "56c96162-675d-4e1a-9945-a0a0b16de0d2" (UID: "56c96162-675d-4e1a-9945-a0a0b16de0d2"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 15:13:59 crc kubenswrapper[4869]: I1001 15:13:59.245426 4869 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/56c96162-675d-4e1a-9945-a0a0b16de0d2-util\") on node \"crc\" DevicePath \"\"" Oct 01 15:13:59 crc kubenswrapper[4869]: I1001 15:13:59.245459 4869 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/56c96162-675d-4e1a-9945-a0a0b16de0d2-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 15:13:59 crc kubenswrapper[4869]: I1001 15:13:59.245469 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jzmwx\" (UniqueName: \"kubernetes.io/projected/56c96162-675d-4e1a-9945-a0a0b16de0d2-kube-api-access-jzmwx\") on node \"crc\" DevicePath \"\"" Oct 01 15:13:59 crc kubenswrapper[4869]: I1001 15:13:59.650521 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcwzr4g" event={"ID":"56c96162-675d-4e1a-9945-a0a0b16de0d2","Type":"ContainerDied","Data":"629ac435051ab319fd6de82a4f93a32a4ad298b75ff1fc7df7eb698e9e05988b"} Oct 01 15:13:59 crc kubenswrapper[4869]: I1001 15:13:59.650588 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="629ac435051ab319fd6de82a4f93a32a4ad298b75ff1fc7df7eb698e9e05988b" Oct 01 15:13:59 crc kubenswrapper[4869]: I1001 15:13:59.650639 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcwzr4g" Oct 01 15:14:00 crc kubenswrapper[4869]: I1001 15:14:00.753523 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-5d6f6cfd66-5whxj"] Oct 01 15:14:00 crc kubenswrapper[4869]: E1001 15:14:00.754051 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56c96162-675d-4e1a-9945-a0a0b16de0d2" containerName="util" Oct 01 15:14:00 crc kubenswrapper[4869]: I1001 15:14:00.754065 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="56c96162-675d-4e1a-9945-a0a0b16de0d2" containerName="util" Oct 01 15:14:00 crc kubenswrapper[4869]: E1001 15:14:00.754079 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56c96162-675d-4e1a-9945-a0a0b16de0d2" containerName="pull" Oct 01 15:14:00 crc kubenswrapper[4869]: I1001 15:14:00.754086 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="56c96162-675d-4e1a-9945-a0a0b16de0d2" containerName="pull" Oct 01 15:14:00 crc kubenswrapper[4869]: E1001 15:14:00.754102 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56c96162-675d-4e1a-9945-a0a0b16de0d2" containerName="extract" Oct 01 15:14:00 crc kubenswrapper[4869]: I1001 15:14:00.754111 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="56c96162-675d-4e1a-9945-a0a0b16de0d2" containerName="extract" Oct 01 15:14:00 crc kubenswrapper[4869]: I1001 15:14:00.754240 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="56c96162-675d-4e1a-9945-a0a0b16de0d2" containerName="extract" Oct 01 15:14:00 crc kubenswrapper[4869]: I1001 15:14:00.754695 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-5whxj" Oct 01 15:14:00 crc kubenswrapper[4869]: I1001 15:14:00.756723 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-z482m" Oct 01 15:14:00 crc kubenswrapper[4869]: I1001 15:14:00.756735 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt" Oct 01 15:14:00 crc kubenswrapper[4869]: I1001 15:14:00.756799 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt" Oct 01 15:14:00 crc kubenswrapper[4869]: I1001 15:14:00.766014 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5d6f6cfd66-5whxj"] Oct 01 15:14:00 crc kubenswrapper[4869]: I1001 15:14:00.868379 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jq74b\" (UniqueName: \"kubernetes.io/projected/bd4b603c-ef21-4afa-adcf-0e075976eeef-kube-api-access-jq74b\") pod \"nmstate-operator-5d6f6cfd66-5whxj\" (UID: \"bd4b603c-ef21-4afa-adcf-0e075976eeef\") " pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-5whxj" Oct 01 15:14:00 crc kubenswrapper[4869]: I1001 15:14:00.970032 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jq74b\" (UniqueName: \"kubernetes.io/projected/bd4b603c-ef21-4afa-adcf-0e075976eeef-kube-api-access-jq74b\") pod \"nmstate-operator-5d6f6cfd66-5whxj\" (UID: \"bd4b603c-ef21-4afa-adcf-0e075976eeef\") " pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-5whxj" Oct 01 15:14:00 crc kubenswrapper[4869]: I1001 15:14:00.992764 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jq74b\" (UniqueName: \"kubernetes.io/projected/bd4b603c-ef21-4afa-adcf-0e075976eeef-kube-api-access-jq74b\") pod \"nmstate-operator-5d6f6cfd66-5whxj\" (UID: \"bd4b603c-ef21-4afa-adcf-0e075976eeef\") " pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-5whxj" Oct 01 15:14:01 crc kubenswrapper[4869]: I1001 15:14:01.076477 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-5whxj" Oct 01 15:14:01 crc kubenswrapper[4869]: I1001 15:14:01.314311 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5d6f6cfd66-5whxj"] Oct 01 15:14:01 crc kubenswrapper[4869]: W1001 15:14:01.321757 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbd4b603c_ef21_4afa_adcf_0e075976eeef.slice/crio-39d8adc701998a2bad3f1f0570cbb1ea8853d704eee6b78556a7bb385527fe90 WatchSource:0}: Error finding container 39d8adc701998a2bad3f1f0570cbb1ea8853d704eee6b78556a7bb385527fe90: Status 404 returned error can't find the container with id 39d8adc701998a2bad3f1f0570cbb1ea8853d704eee6b78556a7bb385527fe90 Oct 01 15:14:01 crc kubenswrapper[4869]: I1001 15:14:01.660186 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-5whxj" event={"ID":"bd4b603c-ef21-4afa-adcf-0e075976eeef","Type":"ContainerStarted","Data":"39d8adc701998a2bad3f1f0570cbb1ea8853d704eee6b78556a7bb385527fe90"} Oct 01 15:14:04 crc kubenswrapper[4869]: I1001 15:14:04.682144 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-5whxj" event={"ID":"bd4b603c-ef21-4afa-adcf-0e075976eeef","Type":"ContainerStarted","Data":"ae61def0d9c94972a6fd926fffa33a9a7ec9dc0aad61f31c8348d2b23c698eb6"} Oct 01 15:14:04 crc kubenswrapper[4869]: I1001 15:14:04.711672 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-5whxj" podStartSLOduration=2.132326039 podStartE2EDuration="4.711639754s" podCreationTimestamp="2025-10-01 15:14:00 +0000 UTC" firstStartedPulling="2025-10-01 15:14:01.323851623 +0000 UTC m=+550.470694739" lastFinishedPulling="2025-10-01 15:14:03.903165338 +0000 UTC m=+553.050008454" observedRunningTime="2025-10-01 15:14:04.707356496 +0000 UTC m=+553.854199642" watchObservedRunningTime="2025-10-01 15:14:04.711639754 +0000 UTC m=+553.858482940" Oct 01 15:14:05 crc kubenswrapper[4869]: I1001 15:14:05.755443 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-58fcddf996-bc5gk"] Oct 01 15:14:05 crc kubenswrapper[4869]: I1001 15:14:05.756899 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-58fcddf996-bc5gk" Oct 01 15:14:05 crc kubenswrapper[4869]: I1001 15:14:05.759761 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-hc2zd" Oct 01 15:14:05 crc kubenswrapper[4869]: I1001 15:14:05.760575 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-6d689559c5-dssrt"] Oct 01 15:14:05 crc kubenswrapper[4869]: I1001 15:14:05.761679 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-webhook-6d689559c5-dssrt" Oct 01 15:14:05 crc kubenswrapper[4869]: I1001 15:14:05.765324 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook" Oct 01 15:14:05 crc kubenswrapper[4869]: I1001 15:14:05.779731 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-58fcddf996-bc5gk"] Oct 01 15:14:05 crc kubenswrapper[4869]: I1001 15:14:05.784443 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-6d689559c5-dssrt"] Oct 01 15:14:05 crc kubenswrapper[4869]: I1001 15:14:05.793292 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-xs8c4"] Oct 01 15:14:05 crc kubenswrapper[4869]: I1001 15:14:05.794186 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-xs8c4" Oct 01 15:14:05 crc kubenswrapper[4869]: I1001 15:14:05.834526 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/441818c5-6e78-4a8f-9ed9-58e7dd4b2028-dbus-socket\") pod \"nmstate-handler-xs8c4\" (UID: \"441818c5-6e78-4a8f-9ed9-58e7dd4b2028\") " pod="openshift-nmstate/nmstate-handler-xs8c4" Oct 01 15:14:05 crc kubenswrapper[4869]: I1001 15:14:05.834575 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/441818c5-6e78-4a8f-9ed9-58e7dd4b2028-ovs-socket\") pod \"nmstate-handler-xs8c4\" (UID: \"441818c5-6e78-4a8f-9ed9-58e7dd4b2028\") " pod="openshift-nmstate/nmstate-handler-xs8c4" Oct 01 15:14:05 crc kubenswrapper[4869]: I1001 15:14:05.834617 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/aa4da6ef-158a-44e5-8d1a-779aa19fe3ac-tls-key-pair\") pod \"nmstate-webhook-6d689559c5-dssrt\" (UID: \"aa4da6ef-158a-44e5-8d1a-779aa19fe3ac\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-dssrt" Oct 01 15:14:05 crc kubenswrapper[4869]: I1001 15:14:05.834799 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7g9ld\" (UniqueName: \"kubernetes.io/projected/27f0e6e2-94a5-4313-9c9e-9eacbc971748-kube-api-access-7g9ld\") pod \"nmstate-metrics-58fcddf996-bc5gk\" (UID: \"27f0e6e2-94a5-4313-9c9e-9eacbc971748\") " pod="openshift-nmstate/nmstate-metrics-58fcddf996-bc5gk" Oct 01 15:14:05 crc kubenswrapper[4869]: I1001 15:14:05.834844 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5rn77\" (UniqueName: \"kubernetes.io/projected/aa4da6ef-158a-44e5-8d1a-779aa19fe3ac-kube-api-access-5rn77\") pod \"nmstate-webhook-6d689559c5-dssrt\" (UID: \"aa4da6ef-158a-44e5-8d1a-779aa19fe3ac\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-dssrt" Oct 01 15:14:05 crc kubenswrapper[4869]: I1001 15:14:05.834887 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/441818c5-6e78-4a8f-9ed9-58e7dd4b2028-nmstate-lock\") pod \"nmstate-handler-xs8c4\" (UID: \"441818c5-6e78-4a8f-9ed9-58e7dd4b2028\") " pod="openshift-nmstate/nmstate-handler-xs8c4" Oct 01 15:14:05 crc kubenswrapper[4869]: I1001 15:14:05.834911 4869 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4kgd6\" (UniqueName: \"kubernetes.io/projected/441818c5-6e78-4a8f-9ed9-58e7dd4b2028-kube-api-access-4kgd6\") pod \"nmstate-handler-xs8c4\" (UID: \"441818c5-6e78-4a8f-9ed9-58e7dd4b2028\") " pod="openshift-nmstate/nmstate-handler-xs8c4" Oct 01 15:14:05 crc kubenswrapper[4869]: I1001 15:14:05.909937 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-864bb6dfb5-zcwb9"] Oct 01 15:14:05 crc kubenswrapper[4869]: I1001 15:14:05.910635 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-zcwb9" Oct 01 15:14:05 crc kubenswrapper[4869]: I1001 15:14:05.913430 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert" Oct 01 15:14:05 crc kubenswrapper[4869]: I1001 15:14:05.913623 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf" Oct 01 15:14:05 crc kubenswrapper[4869]: I1001 15:14:05.913775 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-zhbkz" Oct 01 15:14:05 crc kubenswrapper[4869]: I1001 15:14:05.928587 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-864bb6dfb5-zcwb9"] Oct 01 15:14:05 crc kubenswrapper[4869]: I1001 15:14:05.935853 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2jvc2\" (UniqueName: \"kubernetes.io/projected/58fa3f46-8eb7-4d4f-9548-37c56f012aba-kube-api-access-2jvc2\") pod \"nmstate-console-plugin-864bb6dfb5-zcwb9\" (UID: \"58fa3f46-8eb7-4d4f-9548-37c56f012aba\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-zcwb9" Oct 01 15:14:05 crc kubenswrapper[4869]: I1001 15:14:05.935927 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/aa4da6ef-158a-44e5-8d1a-779aa19fe3ac-tls-key-pair\") pod \"nmstate-webhook-6d689559c5-dssrt\" (UID: \"aa4da6ef-158a-44e5-8d1a-779aa19fe3ac\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-dssrt" Oct 01 15:14:05 crc kubenswrapper[4869]: I1001 15:14:05.935948 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/58fa3f46-8eb7-4d4f-9548-37c56f012aba-plugin-serving-cert\") pod \"nmstate-console-plugin-864bb6dfb5-zcwb9\" (UID: \"58fa3f46-8eb7-4d4f-9548-37c56f012aba\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-zcwb9" Oct 01 15:14:05 crc kubenswrapper[4869]: I1001 15:14:05.936037 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7g9ld\" (UniqueName: \"kubernetes.io/projected/27f0e6e2-94a5-4313-9c9e-9eacbc971748-kube-api-access-7g9ld\") pod \"nmstate-metrics-58fcddf996-bc5gk\" (UID: \"27f0e6e2-94a5-4313-9c9e-9eacbc971748\") " pod="openshift-nmstate/nmstate-metrics-58fcddf996-bc5gk" Oct 01 15:14:05 crc kubenswrapper[4869]: I1001 15:14:05.936065 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5rn77\" (UniqueName: \"kubernetes.io/projected/aa4da6ef-158a-44e5-8d1a-779aa19fe3ac-kube-api-access-5rn77\") pod \"nmstate-webhook-6d689559c5-dssrt\" (UID: \"aa4da6ef-158a-44e5-8d1a-779aa19fe3ac\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-dssrt" Oct 01 15:14:05 crc 
kubenswrapper[4869]: E1001 15:14:05.936037 4869 secret.go:188] Couldn't get secret openshift-nmstate/openshift-nmstate-webhook: secret "openshift-nmstate-webhook" not found Oct 01 15:14:05 crc kubenswrapper[4869]: I1001 15:14:05.936108 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/441818c5-6e78-4a8f-9ed9-58e7dd4b2028-nmstate-lock\") pod \"nmstate-handler-xs8c4\" (UID: \"441818c5-6e78-4a8f-9ed9-58e7dd4b2028\") " pod="openshift-nmstate/nmstate-handler-xs8c4" Oct 01 15:14:05 crc kubenswrapper[4869]: I1001 15:14:05.936132 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4kgd6\" (UniqueName: \"kubernetes.io/projected/441818c5-6e78-4a8f-9ed9-58e7dd4b2028-kube-api-access-4kgd6\") pod \"nmstate-handler-xs8c4\" (UID: \"441818c5-6e78-4a8f-9ed9-58e7dd4b2028\") " pod="openshift-nmstate/nmstate-handler-xs8c4" Oct 01 15:14:05 crc kubenswrapper[4869]: E1001 15:14:05.936175 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/aa4da6ef-158a-44e5-8d1a-779aa19fe3ac-tls-key-pair podName:aa4da6ef-158a-44e5-8d1a-779aa19fe3ac nodeName:}" failed. No retries permitted until 2025-10-01 15:14:06.436151954 +0000 UTC m=+555.582995070 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "tls-key-pair" (UniqueName: "kubernetes.io/secret/aa4da6ef-158a-44e5-8d1a-779aa19fe3ac-tls-key-pair") pod "nmstate-webhook-6d689559c5-dssrt" (UID: "aa4da6ef-158a-44e5-8d1a-779aa19fe3ac") : secret "openshift-nmstate-webhook" not found Oct 01 15:14:05 crc kubenswrapper[4869]: I1001 15:14:05.936202 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/441818c5-6e78-4a8f-9ed9-58e7dd4b2028-nmstate-lock\") pod \"nmstate-handler-xs8c4\" (UID: \"441818c5-6e78-4a8f-9ed9-58e7dd4b2028\") " pod="openshift-nmstate/nmstate-handler-xs8c4" Oct 01 15:14:05 crc kubenswrapper[4869]: I1001 15:14:05.936242 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/441818c5-6e78-4a8f-9ed9-58e7dd4b2028-dbus-socket\") pod \"nmstate-handler-xs8c4\" (UID: \"441818c5-6e78-4a8f-9ed9-58e7dd4b2028\") " pod="openshift-nmstate/nmstate-handler-xs8c4" Oct 01 15:14:05 crc kubenswrapper[4869]: I1001 15:14:05.936301 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/441818c5-6e78-4a8f-9ed9-58e7dd4b2028-ovs-socket\") pod \"nmstate-handler-xs8c4\" (UID: \"441818c5-6e78-4a8f-9ed9-58e7dd4b2028\") " pod="openshift-nmstate/nmstate-handler-xs8c4" Oct 01 15:14:05 crc kubenswrapper[4869]: I1001 15:14:05.936334 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/58fa3f46-8eb7-4d4f-9548-37c56f012aba-nginx-conf\") pod \"nmstate-console-plugin-864bb6dfb5-zcwb9\" (UID: \"58fa3f46-8eb7-4d4f-9548-37c56f012aba\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-zcwb9" Oct 01 15:14:05 crc kubenswrapper[4869]: I1001 15:14:05.936363 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/441818c5-6e78-4a8f-9ed9-58e7dd4b2028-ovs-socket\") pod \"nmstate-handler-xs8c4\" (UID: \"441818c5-6e78-4a8f-9ed9-58e7dd4b2028\") " pod="openshift-nmstate/nmstate-handler-xs8c4" Oct 01 15:14:05 crc 
kubenswrapper[4869]: I1001 15:14:05.936552 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/441818c5-6e78-4a8f-9ed9-58e7dd4b2028-dbus-socket\") pod \"nmstate-handler-xs8c4\" (UID: \"441818c5-6e78-4a8f-9ed9-58e7dd4b2028\") " pod="openshift-nmstate/nmstate-handler-xs8c4" Oct 01 15:14:05 crc kubenswrapper[4869]: I1001 15:14:05.956974 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5rn77\" (UniqueName: \"kubernetes.io/projected/aa4da6ef-158a-44e5-8d1a-779aa19fe3ac-kube-api-access-5rn77\") pod \"nmstate-webhook-6d689559c5-dssrt\" (UID: \"aa4da6ef-158a-44e5-8d1a-779aa19fe3ac\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-dssrt" Oct 01 15:14:05 crc kubenswrapper[4869]: I1001 15:14:05.957098 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4kgd6\" (UniqueName: \"kubernetes.io/projected/441818c5-6e78-4a8f-9ed9-58e7dd4b2028-kube-api-access-4kgd6\") pod \"nmstate-handler-xs8c4\" (UID: \"441818c5-6e78-4a8f-9ed9-58e7dd4b2028\") " pod="openshift-nmstate/nmstate-handler-xs8c4" Oct 01 15:14:05 crc kubenswrapper[4869]: I1001 15:14:05.960028 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7g9ld\" (UniqueName: \"kubernetes.io/projected/27f0e6e2-94a5-4313-9c9e-9eacbc971748-kube-api-access-7g9ld\") pod \"nmstate-metrics-58fcddf996-bc5gk\" (UID: \"27f0e6e2-94a5-4313-9c9e-9eacbc971748\") " pod="openshift-nmstate/nmstate-metrics-58fcddf996-bc5gk" Oct 01 15:14:06 crc kubenswrapper[4869]: I1001 15:14:06.037390 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/58fa3f46-8eb7-4d4f-9548-37c56f012aba-plugin-serving-cert\") pod \"nmstate-console-plugin-864bb6dfb5-zcwb9\" (UID: \"58fa3f46-8eb7-4d4f-9548-37c56f012aba\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-zcwb9" Oct 01 15:14:06 crc kubenswrapper[4869]: I1001 15:14:06.037516 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/58fa3f46-8eb7-4d4f-9548-37c56f012aba-nginx-conf\") pod \"nmstate-console-plugin-864bb6dfb5-zcwb9\" (UID: \"58fa3f46-8eb7-4d4f-9548-37c56f012aba\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-zcwb9" Oct 01 15:14:06 crc kubenswrapper[4869]: E1001 15:14:06.037543 4869 secret.go:188] Couldn't get secret openshift-nmstate/plugin-serving-cert: secret "plugin-serving-cert" not found Oct 01 15:14:06 crc kubenswrapper[4869]: E1001 15:14:06.037606 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/58fa3f46-8eb7-4d4f-9548-37c56f012aba-plugin-serving-cert podName:58fa3f46-8eb7-4d4f-9548-37c56f012aba nodeName:}" failed. No retries permitted until 2025-10-01 15:14:06.537588519 +0000 UTC m=+555.684431645 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "plugin-serving-cert" (UniqueName: "kubernetes.io/secret/58fa3f46-8eb7-4d4f-9548-37c56f012aba-plugin-serving-cert") pod "nmstate-console-plugin-864bb6dfb5-zcwb9" (UID: "58fa3f46-8eb7-4d4f-9548-37c56f012aba") : secret "plugin-serving-cert" not found Oct 01 15:14:06 crc kubenswrapper[4869]: I1001 15:14:06.037546 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2jvc2\" (UniqueName: \"kubernetes.io/projected/58fa3f46-8eb7-4d4f-9548-37c56f012aba-kube-api-access-2jvc2\") pod \"nmstate-console-plugin-864bb6dfb5-zcwb9\" (UID: \"58fa3f46-8eb7-4d4f-9548-37c56f012aba\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-zcwb9" Oct 01 15:14:06 crc kubenswrapper[4869]: I1001 15:14:06.038916 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/58fa3f46-8eb7-4d4f-9548-37c56f012aba-nginx-conf\") pod \"nmstate-console-plugin-864bb6dfb5-zcwb9\" (UID: \"58fa3f46-8eb7-4d4f-9548-37c56f012aba\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-zcwb9" Oct 01 15:14:06 crc kubenswrapper[4869]: I1001 15:14:06.071082 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2jvc2\" (UniqueName: \"kubernetes.io/projected/58fa3f46-8eb7-4d4f-9548-37c56f012aba-kube-api-access-2jvc2\") pod \"nmstate-console-plugin-864bb6dfb5-zcwb9\" (UID: \"58fa3f46-8eb7-4d4f-9548-37c56f012aba\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-zcwb9" Oct 01 15:14:06 crc kubenswrapper[4869]: I1001 15:14:06.082515 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-58fcddf996-bc5gk" Oct 01 15:14:06 crc kubenswrapper[4869]: I1001 15:14:06.116073 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-xs8c4" Oct 01 15:14:06 crc kubenswrapper[4869]: I1001 15:14:06.124120 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-5567b7c9db-kpmz7"] Oct 01 15:14:06 crc kubenswrapper[4869]: I1001 15:14:06.124728 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-5567b7c9db-kpmz7" Oct 01 15:14:06 crc kubenswrapper[4869]: I1001 15:14:06.138309 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/b8e5c8a3-b1f1-45ee-b69b-51eddf4c5946-console-serving-cert\") pod \"console-5567b7c9db-kpmz7\" (UID: \"b8e5c8a3-b1f1-45ee-b69b-51eddf4c5946\") " pod="openshift-console/console-5567b7c9db-kpmz7" Oct 01 15:14:06 crc kubenswrapper[4869]: I1001 15:14:06.138362 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/b8e5c8a3-b1f1-45ee-b69b-51eddf4c5946-console-oauth-config\") pod \"console-5567b7c9db-kpmz7\" (UID: \"b8e5c8a3-b1f1-45ee-b69b-51eddf4c5946\") " pod="openshift-console/console-5567b7c9db-kpmz7" Oct 01 15:14:06 crc kubenswrapper[4869]: I1001 15:14:06.138380 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/b8e5c8a3-b1f1-45ee-b69b-51eddf4c5946-oauth-serving-cert\") pod \"console-5567b7c9db-kpmz7\" (UID: \"b8e5c8a3-b1f1-45ee-b69b-51eddf4c5946\") " pod="openshift-console/console-5567b7c9db-kpmz7" Oct 01 15:14:06 crc kubenswrapper[4869]: I1001 15:14:06.138436 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b8e5c8a3-b1f1-45ee-b69b-51eddf4c5946-trusted-ca-bundle\") pod \"console-5567b7c9db-kpmz7\" (UID: \"b8e5c8a3-b1f1-45ee-b69b-51eddf4c5946\") " pod="openshift-console/console-5567b7c9db-kpmz7" Oct 01 15:14:06 crc kubenswrapper[4869]: I1001 15:14:06.138464 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/b8e5c8a3-b1f1-45ee-b69b-51eddf4c5946-service-ca\") pod \"console-5567b7c9db-kpmz7\" (UID: \"b8e5c8a3-b1f1-45ee-b69b-51eddf4c5946\") " pod="openshift-console/console-5567b7c9db-kpmz7" Oct 01 15:14:06 crc kubenswrapper[4869]: I1001 15:14:06.138490 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/b8e5c8a3-b1f1-45ee-b69b-51eddf4c5946-console-config\") pod \"console-5567b7c9db-kpmz7\" (UID: \"b8e5c8a3-b1f1-45ee-b69b-51eddf4c5946\") " pod="openshift-console/console-5567b7c9db-kpmz7" Oct 01 15:14:06 crc kubenswrapper[4869]: I1001 15:14:06.138523 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j7lbn\" (UniqueName: \"kubernetes.io/projected/b8e5c8a3-b1f1-45ee-b69b-51eddf4c5946-kube-api-access-j7lbn\") pod \"console-5567b7c9db-kpmz7\" (UID: \"b8e5c8a3-b1f1-45ee-b69b-51eddf4c5946\") " pod="openshift-console/console-5567b7c9db-kpmz7" Oct 01 15:14:06 crc kubenswrapper[4869]: I1001 15:14:06.147495 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-5567b7c9db-kpmz7"] Oct 01 15:14:06 crc kubenswrapper[4869]: I1001 15:14:06.239392 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/b8e5c8a3-b1f1-45ee-b69b-51eddf4c5946-console-serving-cert\") pod \"console-5567b7c9db-kpmz7\" (UID: \"b8e5c8a3-b1f1-45ee-b69b-51eddf4c5946\") " pod="openshift-console/console-5567b7c9db-kpmz7" Oct 01 15:14:06 crc 
kubenswrapper[4869]: I1001 15:14:06.239699 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/b8e5c8a3-b1f1-45ee-b69b-51eddf4c5946-console-oauth-config\") pod \"console-5567b7c9db-kpmz7\" (UID: \"b8e5c8a3-b1f1-45ee-b69b-51eddf4c5946\") " pod="openshift-console/console-5567b7c9db-kpmz7" Oct 01 15:14:06 crc kubenswrapper[4869]: I1001 15:14:06.239726 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/b8e5c8a3-b1f1-45ee-b69b-51eddf4c5946-oauth-serving-cert\") pod \"console-5567b7c9db-kpmz7\" (UID: \"b8e5c8a3-b1f1-45ee-b69b-51eddf4c5946\") " pod="openshift-console/console-5567b7c9db-kpmz7" Oct 01 15:14:06 crc kubenswrapper[4869]: I1001 15:14:06.239768 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b8e5c8a3-b1f1-45ee-b69b-51eddf4c5946-trusted-ca-bundle\") pod \"console-5567b7c9db-kpmz7\" (UID: \"b8e5c8a3-b1f1-45ee-b69b-51eddf4c5946\") " pod="openshift-console/console-5567b7c9db-kpmz7" Oct 01 15:14:06 crc kubenswrapper[4869]: I1001 15:14:06.239789 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/b8e5c8a3-b1f1-45ee-b69b-51eddf4c5946-service-ca\") pod \"console-5567b7c9db-kpmz7\" (UID: \"b8e5c8a3-b1f1-45ee-b69b-51eddf4c5946\") " pod="openshift-console/console-5567b7c9db-kpmz7" Oct 01 15:14:06 crc kubenswrapper[4869]: I1001 15:14:06.239816 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/b8e5c8a3-b1f1-45ee-b69b-51eddf4c5946-console-config\") pod \"console-5567b7c9db-kpmz7\" (UID: \"b8e5c8a3-b1f1-45ee-b69b-51eddf4c5946\") " pod="openshift-console/console-5567b7c9db-kpmz7" Oct 01 15:14:06 crc kubenswrapper[4869]: I1001 15:14:06.239831 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j7lbn\" (UniqueName: \"kubernetes.io/projected/b8e5c8a3-b1f1-45ee-b69b-51eddf4c5946-kube-api-access-j7lbn\") pod \"console-5567b7c9db-kpmz7\" (UID: \"b8e5c8a3-b1f1-45ee-b69b-51eddf4c5946\") " pod="openshift-console/console-5567b7c9db-kpmz7" Oct 01 15:14:06 crc kubenswrapper[4869]: I1001 15:14:06.241198 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/b8e5c8a3-b1f1-45ee-b69b-51eddf4c5946-oauth-serving-cert\") pod \"console-5567b7c9db-kpmz7\" (UID: \"b8e5c8a3-b1f1-45ee-b69b-51eddf4c5946\") " pod="openshift-console/console-5567b7c9db-kpmz7" Oct 01 15:14:06 crc kubenswrapper[4869]: I1001 15:14:06.241193 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/b8e5c8a3-b1f1-45ee-b69b-51eddf4c5946-service-ca\") pod \"console-5567b7c9db-kpmz7\" (UID: \"b8e5c8a3-b1f1-45ee-b69b-51eddf4c5946\") " pod="openshift-console/console-5567b7c9db-kpmz7" Oct 01 15:14:06 crc kubenswrapper[4869]: I1001 15:14:06.243315 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/b8e5c8a3-b1f1-45ee-b69b-51eddf4c5946-console-config\") pod \"console-5567b7c9db-kpmz7\" (UID: \"b8e5c8a3-b1f1-45ee-b69b-51eddf4c5946\") " pod="openshift-console/console-5567b7c9db-kpmz7" Oct 01 15:14:06 crc kubenswrapper[4869]: I1001 15:14:06.243669 4869 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b8e5c8a3-b1f1-45ee-b69b-51eddf4c5946-trusted-ca-bundle\") pod \"console-5567b7c9db-kpmz7\" (UID: \"b8e5c8a3-b1f1-45ee-b69b-51eddf4c5946\") " pod="openshift-console/console-5567b7c9db-kpmz7" Oct 01 15:14:06 crc kubenswrapper[4869]: I1001 15:14:06.245904 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/b8e5c8a3-b1f1-45ee-b69b-51eddf4c5946-console-serving-cert\") pod \"console-5567b7c9db-kpmz7\" (UID: \"b8e5c8a3-b1f1-45ee-b69b-51eddf4c5946\") " pod="openshift-console/console-5567b7c9db-kpmz7" Oct 01 15:14:06 crc kubenswrapper[4869]: I1001 15:14:06.248439 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/b8e5c8a3-b1f1-45ee-b69b-51eddf4c5946-console-oauth-config\") pod \"console-5567b7c9db-kpmz7\" (UID: \"b8e5c8a3-b1f1-45ee-b69b-51eddf4c5946\") " pod="openshift-console/console-5567b7c9db-kpmz7" Oct 01 15:14:06 crc kubenswrapper[4869]: I1001 15:14:06.257379 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j7lbn\" (UniqueName: \"kubernetes.io/projected/b8e5c8a3-b1f1-45ee-b69b-51eddf4c5946-kube-api-access-j7lbn\") pod \"console-5567b7c9db-kpmz7\" (UID: \"b8e5c8a3-b1f1-45ee-b69b-51eddf4c5946\") " pod="openshift-console/console-5567b7c9db-kpmz7" Oct 01 15:14:06 crc kubenswrapper[4869]: I1001 15:14:06.284586 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-58fcddf996-bc5gk"] Oct 01 15:14:06 crc kubenswrapper[4869]: I1001 15:14:06.442583 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/aa4da6ef-158a-44e5-8d1a-779aa19fe3ac-tls-key-pair\") pod \"nmstate-webhook-6d689559c5-dssrt\" (UID: \"aa4da6ef-158a-44e5-8d1a-779aa19fe3ac\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-dssrt" Oct 01 15:14:06 crc kubenswrapper[4869]: I1001 15:14:06.444145 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-5567b7c9db-kpmz7" Oct 01 15:14:06 crc kubenswrapper[4869]: I1001 15:14:06.447802 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/aa4da6ef-158a-44e5-8d1a-779aa19fe3ac-tls-key-pair\") pod \"nmstate-webhook-6d689559c5-dssrt\" (UID: \"aa4da6ef-158a-44e5-8d1a-779aa19fe3ac\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-dssrt" Oct 01 15:14:06 crc kubenswrapper[4869]: I1001 15:14:06.544407 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/58fa3f46-8eb7-4d4f-9548-37c56f012aba-plugin-serving-cert\") pod \"nmstate-console-plugin-864bb6dfb5-zcwb9\" (UID: \"58fa3f46-8eb7-4d4f-9548-37c56f012aba\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-zcwb9" Oct 01 15:14:06 crc kubenswrapper[4869]: I1001 15:14:06.550516 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/58fa3f46-8eb7-4d4f-9548-37c56f012aba-plugin-serving-cert\") pod \"nmstate-console-plugin-864bb6dfb5-zcwb9\" (UID: \"58fa3f46-8eb7-4d4f-9548-37c56f012aba\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-zcwb9" Oct 01 15:14:06 crc kubenswrapper[4869]: I1001 15:14:06.695714 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-58fcddf996-bc5gk" event={"ID":"27f0e6e2-94a5-4313-9c9e-9eacbc971748","Type":"ContainerStarted","Data":"f77313576443eaa4db14e8cb84e0f7fea96e4e911d19266bc911dedd3de116cf"} Oct 01 15:14:06 crc kubenswrapper[4869]: I1001 15:14:06.698004 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-6d689559c5-dssrt" Oct 01 15:14:06 crc kubenswrapper[4869]: I1001 15:14:06.699528 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-xs8c4" event={"ID":"441818c5-6e78-4a8f-9ed9-58e7dd4b2028","Type":"ContainerStarted","Data":"1b4f1160a493344f9201f412caca5c75b512b48d88702dbe99ffe5f13ea8d727"} Oct 01 15:14:06 crc kubenswrapper[4869]: I1001 15:14:06.715299 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-5567b7c9db-kpmz7"] Oct 01 15:14:06 crc kubenswrapper[4869]: W1001 15:14:06.729036 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb8e5c8a3_b1f1_45ee_b69b_51eddf4c5946.slice/crio-8c3d6bc7e6013fdee8c8e1d545486d42230b3c55cb4b83dfd5f26efb729d711a WatchSource:0}: Error finding container 8c3d6bc7e6013fdee8c8e1d545486d42230b3c55cb4b83dfd5f26efb729d711a: Status 404 returned error can't find the container with id 8c3d6bc7e6013fdee8c8e1d545486d42230b3c55cb4b83dfd5f26efb729d711a Oct 01 15:14:06 crc kubenswrapper[4869]: I1001 15:14:06.822831 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-zcwb9" Oct 01 15:14:07 crc kubenswrapper[4869]: I1001 15:14:07.035392 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-864bb6dfb5-zcwb9"] Oct 01 15:14:07 crc kubenswrapper[4869]: W1001 15:14:07.050345 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod58fa3f46_8eb7_4d4f_9548_37c56f012aba.slice/crio-ca496496ffb05a3cae9ffb4d3103f604bf3228d57ed376ba81a98e50d8a36e4f WatchSource:0}: Error finding container ca496496ffb05a3cae9ffb4d3103f604bf3228d57ed376ba81a98e50d8a36e4f: Status 404 returned error can't find the container with id ca496496ffb05a3cae9ffb4d3103f604bf3228d57ed376ba81a98e50d8a36e4f Oct 01 15:14:07 crc kubenswrapper[4869]: I1001 15:14:07.129087 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-6d689559c5-dssrt"] Oct 01 15:14:07 crc kubenswrapper[4869]: W1001 15:14:07.133290 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podaa4da6ef_158a_44e5_8d1a_779aa19fe3ac.slice/crio-6223dadd0a29bb55bca96fae860cdce6e3c012da4bd842b0dcebfad503b7383c WatchSource:0}: Error finding container 6223dadd0a29bb55bca96fae860cdce6e3c012da4bd842b0dcebfad503b7383c: Status 404 returned error can't find the container with id 6223dadd0a29bb55bca96fae860cdce6e3c012da4bd842b0dcebfad503b7383c Oct 01 15:14:07 crc kubenswrapper[4869]: I1001 15:14:07.714769 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-5567b7c9db-kpmz7" event={"ID":"b8e5c8a3-b1f1-45ee-b69b-51eddf4c5946","Type":"ContainerStarted","Data":"e1cea44e97e11ba02b4b9d9a2d73be8912c3941f1123613b9071e95844019907"} Oct 01 15:14:07 crc kubenswrapper[4869]: I1001 15:14:07.714815 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-5567b7c9db-kpmz7" event={"ID":"b8e5c8a3-b1f1-45ee-b69b-51eddf4c5946","Type":"ContainerStarted","Data":"8c3d6bc7e6013fdee8c8e1d545486d42230b3c55cb4b83dfd5f26efb729d711a"} Oct 01 15:14:07 crc kubenswrapper[4869]: I1001 15:14:07.717125 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-6d689559c5-dssrt" event={"ID":"aa4da6ef-158a-44e5-8d1a-779aa19fe3ac","Type":"ContainerStarted","Data":"6223dadd0a29bb55bca96fae860cdce6e3c012da4bd842b0dcebfad503b7383c"} Oct 01 15:14:07 crc kubenswrapper[4869]: I1001 15:14:07.718355 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-zcwb9" event={"ID":"58fa3f46-8eb7-4d4f-9548-37c56f012aba","Type":"ContainerStarted","Data":"ca496496ffb05a3cae9ffb4d3103f604bf3228d57ed376ba81a98e50d8a36e4f"} Oct 01 15:14:10 crc kubenswrapper[4869]: I1001 15:14:10.737060 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-xs8c4" event={"ID":"441818c5-6e78-4a8f-9ed9-58e7dd4b2028","Type":"ContainerStarted","Data":"986db3c34f99f3e6dac6202d395cccb5945f8bda2565f33f95a8aec20a75bca0"} Oct 01 15:14:10 crc kubenswrapper[4869]: I1001 15:14:10.737873 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-xs8c4" Oct 01 15:14:10 crc kubenswrapper[4869]: I1001 15:14:10.741681 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-6d689559c5-dssrt" 
event={"ID":"aa4da6ef-158a-44e5-8d1a-779aa19fe3ac","Type":"ContainerStarted","Data":"b2825444eff4b4c6d99387f6454d228630e32645a13994d2d0cec3b378353f1c"} Oct 01 15:14:10 crc kubenswrapper[4869]: I1001 15:14:10.742168 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-6d689559c5-dssrt" Oct 01 15:14:10 crc kubenswrapper[4869]: I1001 15:14:10.743827 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-zcwb9" event={"ID":"58fa3f46-8eb7-4d4f-9548-37c56f012aba","Type":"ContainerStarted","Data":"94c2506b293dfb9320b5125e90948fadbd1858bdd3457fa4c836fd6414aa0591"} Oct 01 15:14:10 crc kubenswrapper[4869]: I1001 15:14:10.745616 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-58fcddf996-bc5gk" event={"ID":"27f0e6e2-94a5-4313-9c9e-9eacbc971748","Type":"ContainerStarted","Data":"c1f9913f8a4df05b50a07411e0e87e257fa37dc49c286ff0c860862ee21fc194"} Oct 01 15:14:10 crc kubenswrapper[4869]: I1001 15:14:10.755086 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-5567b7c9db-kpmz7" podStartSLOduration=4.75506988 podStartE2EDuration="4.75506988s" podCreationTimestamp="2025-10-01 15:14:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:14:07.737473541 +0000 UTC m=+556.884316677" watchObservedRunningTime="2025-10-01 15:14:10.75506988 +0000 UTC m=+559.901912996" Oct 01 15:14:10 crc kubenswrapper[4869]: I1001 15:14:10.757180 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-xs8c4" podStartSLOduration=2.078427313 podStartE2EDuration="5.757172103s" podCreationTimestamp="2025-10-01 15:14:05 +0000 UTC" firstStartedPulling="2025-10-01 15:14:06.162199891 +0000 UTC m=+555.309043007" lastFinishedPulling="2025-10-01 15:14:09.840944661 +0000 UTC m=+558.987787797" observedRunningTime="2025-10-01 15:14:10.755755557 +0000 UTC m=+559.902598693" watchObservedRunningTime="2025-10-01 15:14:10.757172103 +0000 UTC m=+559.904015219" Oct 01 15:14:10 crc kubenswrapper[4869]: I1001 15:14:10.773474 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-6d689559c5-dssrt" podStartSLOduration=3.091578838 podStartE2EDuration="5.773444045s" podCreationTimestamp="2025-10-01 15:14:05 +0000 UTC" firstStartedPulling="2025-10-01 15:14:07.135796945 +0000 UTC m=+556.282640071" lastFinishedPulling="2025-10-01 15:14:09.817662152 +0000 UTC m=+558.964505278" observedRunningTime="2025-10-01 15:14:10.771728701 +0000 UTC m=+559.918571817" watchObservedRunningTime="2025-10-01 15:14:10.773444045 +0000 UTC m=+559.920287191" Oct 01 15:14:10 crc kubenswrapper[4869]: I1001 15:14:10.795336 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-zcwb9" podStartSLOduration=3.029713993 podStartE2EDuration="5.795314768s" podCreationTimestamp="2025-10-01 15:14:05 +0000 UTC" firstStartedPulling="2025-10-01 15:14:07.052006355 +0000 UTC m=+556.198849471" lastFinishedPulling="2025-10-01 15:14:09.81760713 +0000 UTC m=+558.964450246" observedRunningTime="2025-10-01 15:14:10.793243085 +0000 UTC m=+559.940086281" watchObservedRunningTime="2025-10-01 15:14:10.795314768 +0000 UTC m=+559.942157874" Oct 01 15:14:13 crc kubenswrapper[4869]: I1001 15:14:13.354637 4869 
patch_prober.go:28] interesting pod/machine-config-daemon-c86m8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 15:14:13 crc kubenswrapper[4869]: I1001 15:14:13.356457 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 15:14:13 crc kubenswrapper[4869]: I1001 15:14:13.356534 4869 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" Oct 01 15:14:13 crc kubenswrapper[4869]: I1001 15:14:13.357206 4869 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"d389e7566576d3e629333a96c6509eba4ec6eb584120d194f797cef044db86f8"} pod="openshift-machine-config-operator/machine-config-daemon-c86m8" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 01 15:14:13 crc kubenswrapper[4869]: I1001 15:14:13.357287 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" containerID="cri-o://d389e7566576d3e629333a96c6509eba4ec6eb584120d194f797cef044db86f8" gracePeriod=600 Oct 01 15:14:13 crc kubenswrapper[4869]: I1001 15:14:13.788202 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-58fcddf996-bc5gk" event={"ID":"27f0e6e2-94a5-4313-9c9e-9eacbc971748","Type":"ContainerStarted","Data":"477d6470d69b9431c4f75da395985b54d2bbb544f32068e427a48f701dc3c101"} Oct 01 15:14:13 crc kubenswrapper[4869]: I1001 15:14:13.791985 4869 generic.go:334] "Generic (PLEG): container finished" podID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerID="d389e7566576d3e629333a96c6509eba4ec6eb584120d194f797cef044db86f8" exitCode=0 Oct 01 15:14:13 crc kubenswrapper[4869]: I1001 15:14:13.792026 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" event={"ID":"a4b64f7f-0b03-4f47-965b-9fde048b735c","Type":"ContainerDied","Data":"d389e7566576d3e629333a96c6509eba4ec6eb584120d194f797cef044db86f8"} Oct 01 15:14:13 crc kubenswrapper[4869]: I1001 15:14:13.792048 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" event={"ID":"a4b64f7f-0b03-4f47-965b-9fde048b735c","Type":"ContainerStarted","Data":"aedd256e8a7e9adcb3428c1dfb846efe5c6adc26f622dd82be7a88d857fb712b"} Oct 01 15:14:13 crc kubenswrapper[4869]: I1001 15:14:13.792067 4869 scope.go:117] "RemoveContainer" containerID="af6f12677fcb9642c558dd68c19ae34abd92bffe6eaa669049660c21b5cb4867" Oct 01 15:14:13 crc kubenswrapper[4869]: I1001 15:14:13.818365 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-58fcddf996-bc5gk" podStartSLOduration=2.376409111 podStartE2EDuration="8.818235621s" podCreationTimestamp="2025-10-01 15:14:05 +0000 UTC" firstStartedPulling="2025-10-01 15:14:06.289977443 +0000 UTC m=+555.436820559" 
lastFinishedPulling="2025-10-01 15:14:12.731803953 +0000 UTC m=+561.878647069" observedRunningTime="2025-10-01 15:14:13.816585929 +0000 UTC m=+562.963429125" watchObservedRunningTime="2025-10-01 15:14:13.818235621 +0000 UTC m=+562.965078767" Oct 01 15:14:16 crc kubenswrapper[4869]: I1001 15:14:16.158372 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-xs8c4" Oct 01 15:14:16 crc kubenswrapper[4869]: I1001 15:14:16.445136 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-5567b7c9db-kpmz7" Oct 01 15:14:16 crc kubenswrapper[4869]: I1001 15:14:16.445241 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-5567b7c9db-kpmz7" Oct 01 15:14:16 crc kubenswrapper[4869]: I1001 15:14:16.453410 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-5567b7c9db-kpmz7" Oct 01 15:14:16 crc kubenswrapper[4869]: I1001 15:14:16.824589 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-5567b7c9db-kpmz7" Oct 01 15:14:16 crc kubenswrapper[4869]: I1001 15:14:16.892732 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-jv4xs"] Oct 01 15:14:26 crc kubenswrapper[4869]: I1001 15:14:26.709153 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-6d689559c5-dssrt" Oct 01 15:14:41 crc kubenswrapper[4869]: I1001 15:14:41.296181 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h5q5b"] Oct 01 15:14:41 crc kubenswrapper[4869]: I1001 15:14:41.298761 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h5q5b" Oct 01 15:14:41 crc kubenswrapper[4869]: I1001 15:14:41.300849 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Oct 01 15:14:41 crc kubenswrapper[4869]: I1001 15:14:41.315871 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h5q5b"] Oct 01 15:14:41 crc kubenswrapper[4869]: I1001 15:14:41.433885 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/7e6aa79a-0f81-43b0-b0e7-61f08276a955-util\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h5q5b\" (UID: \"7e6aa79a-0f81-43b0-b0e7-61f08276a955\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h5q5b" Oct 01 15:14:41 crc kubenswrapper[4869]: I1001 15:14:41.433962 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/7e6aa79a-0f81-43b0-b0e7-61f08276a955-bundle\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h5q5b\" (UID: \"7e6aa79a-0f81-43b0-b0e7-61f08276a955\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h5q5b" Oct 01 15:14:41 crc kubenswrapper[4869]: I1001 15:14:41.434041 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-667vx\" (UniqueName: \"kubernetes.io/projected/7e6aa79a-0f81-43b0-b0e7-61f08276a955-kube-api-access-667vx\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h5q5b\" (UID: \"7e6aa79a-0f81-43b0-b0e7-61f08276a955\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h5q5b" Oct 01 15:14:41 crc kubenswrapper[4869]: I1001 15:14:41.535338 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/7e6aa79a-0f81-43b0-b0e7-61f08276a955-util\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h5q5b\" (UID: \"7e6aa79a-0f81-43b0-b0e7-61f08276a955\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h5q5b" Oct 01 15:14:41 crc kubenswrapper[4869]: I1001 15:14:41.535433 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/7e6aa79a-0f81-43b0-b0e7-61f08276a955-bundle\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h5q5b\" (UID: \"7e6aa79a-0f81-43b0-b0e7-61f08276a955\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h5q5b" Oct 01 15:14:41 crc kubenswrapper[4869]: I1001 15:14:41.535494 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-667vx\" (UniqueName: \"kubernetes.io/projected/7e6aa79a-0f81-43b0-b0e7-61f08276a955-kube-api-access-667vx\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h5q5b\" (UID: \"7e6aa79a-0f81-43b0-b0e7-61f08276a955\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h5q5b" Oct 01 15:14:41 crc kubenswrapper[4869]: I1001 15:14:41.536187 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/7e6aa79a-0f81-43b0-b0e7-61f08276a955-util\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h5q5b\" (UID: \"7e6aa79a-0f81-43b0-b0e7-61f08276a955\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h5q5b" Oct 01 15:14:41 crc kubenswrapper[4869]: I1001 15:14:41.536671 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/7e6aa79a-0f81-43b0-b0e7-61f08276a955-bundle\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h5q5b\" (UID: \"7e6aa79a-0f81-43b0-b0e7-61f08276a955\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h5q5b" Oct 01 15:14:41 crc kubenswrapper[4869]: I1001 15:14:41.571411 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-667vx\" (UniqueName: \"kubernetes.io/projected/7e6aa79a-0f81-43b0-b0e7-61f08276a955-kube-api-access-667vx\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h5q5b\" (UID: \"7e6aa79a-0f81-43b0-b0e7-61f08276a955\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h5q5b" Oct 01 15:14:41 crc kubenswrapper[4869]: I1001 15:14:41.663241 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h5q5b" Oct 01 15:14:41 crc kubenswrapper[4869]: I1001 15:14:41.947402 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-jv4xs" podUID="4636576a-d3da-4491-a146-a6ffe6382a06" containerName="console" containerID="cri-o://0f194fc8bf95f700b6d1017a54eaa910d245f1c7aed3a567d4cb83421a1ac718" gracePeriod=15 Oct 01 15:14:41 crc kubenswrapper[4869]: I1001 15:14:41.962505 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h5q5b"] Oct 01 15:14:42 crc kubenswrapper[4869]: I1001 15:14:42.000895 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h5q5b" event={"ID":"7e6aa79a-0f81-43b0-b0e7-61f08276a955","Type":"ContainerStarted","Data":"e9ba84a8bc2593ef70434b9a2d3e4370be812a32dc1eeb71a1800f8997b78b4d"} Oct 01 15:14:42 crc kubenswrapper[4869]: I1001 15:14:42.346253 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-jv4xs_4636576a-d3da-4491-a146-a6ffe6382a06/console/0.log" Oct 01 15:14:42 crc kubenswrapper[4869]: I1001 15:14:42.346602 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-jv4xs" Oct 01 15:14:42 crc kubenswrapper[4869]: I1001 15:14:42.447034 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/4636576a-d3da-4491-a146-a6ffe6382a06-console-config\") pod \"4636576a-d3da-4491-a146-a6ffe6382a06\" (UID: \"4636576a-d3da-4491-a146-a6ffe6382a06\") " Oct 01 15:14:42 crc kubenswrapper[4869]: I1001 15:14:42.447103 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4636576a-d3da-4491-a146-a6ffe6382a06-trusted-ca-bundle\") pod \"4636576a-d3da-4491-a146-a6ffe6382a06\" (UID: \"4636576a-d3da-4491-a146-a6ffe6382a06\") " Oct 01 15:14:42 crc kubenswrapper[4869]: I1001 15:14:42.447131 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/4636576a-d3da-4491-a146-a6ffe6382a06-service-ca\") pod \"4636576a-d3da-4491-a146-a6ffe6382a06\" (UID: \"4636576a-d3da-4491-a146-a6ffe6382a06\") " Oct 01 15:14:42 crc kubenswrapper[4869]: I1001 15:14:42.447155 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cvl5m\" (UniqueName: \"kubernetes.io/projected/4636576a-d3da-4491-a146-a6ffe6382a06-kube-api-access-cvl5m\") pod \"4636576a-d3da-4491-a146-a6ffe6382a06\" (UID: \"4636576a-d3da-4491-a146-a6ffe6382a06\") " Oct 01 15:14:42 crc kubenswrapper[4869]: I1001 15:14:42.447231 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/4636576a-d3da-4491-a146-a6ffe6382a06-oauth-serving-cert\") pod \"4636576a-d3da-4491-a146-a6ffe6382a06\" (UID: \"4636576a-d3da-4491-a146-a6ffe6382a06\") " Oct 01 15:14:42 crc kubenswrapper[4869]: I1001 15:14:42.447319 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/4636576a-d3da-4491-a146-a6ffe6382a06-console-serving-cert\") pod \"4636576a-d3da-4491-a146-a6ffe6382a06\" (UID: \"4636576a-d3da-4491-a146-a6ffe6382a06\") " Oct 01 15:14:42 crc kubenswrapper[4869]: I1001 15:14:42.447352 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/4636576a-d3da-4491-a146-a6ffe6382a06-console-oauth-config\") pod \"4636576a-d3da-4491-a146-a6ffe6382a06\" (UID: \"4636576a-d3da-4491-a146-a6ffe6382a06\") " Oct 01 15:14:42 crc kubenswrapper[4869]: I1001 15:14:42.448684 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4636576a-d3da-4491-a146-a6ffe6382a06-service-ca" (OuterVolumeSpecName: "service-ca") pod "4636576a-d3da-4491-a146-a6ffe6382a06" (UID: "4636576a-d3da-4491-a146-a6ffe6382a06"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:14:42 crc kubenswrapper[4869]: I1001 15:14:42.448699 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4636576a-d3da-4491-a146-a6ffe6382a06-console-config" (OuterVolumeSpecName: "console-config") pod "4636576a-d3da-4491-a146-a6ffe6382a06" (UID: "4636576a-d3da-4491-a146-a6ffe6382a06"). InnerVolumeSpecName "console-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:14:42 crc kubenswrapper[4869]: I1001 15:14:42.448711 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4636576a-d3da-4491-a146-a6ffe6382a06-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "4636576a-d3da-4491-a146-a6ffe6382a06" (UID: "4636576a-d3da-4491-a146-a6ffe6382a06"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:14:42 crc kubenswrapper[4869]: I1001 15:14:42.448735 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4636576a-d3da-4491-a146-a6ffe6382a06-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "4636576a-d3da-4491-a146-a6ffe6382a06" (UID: "4636576a-d3da-4491-a146-a6ffe6382a06"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:14:42 crc kubenswrapper[4869]: I1001 15:14:42.453138 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4636576a-d3da-4491-a146-a6ffe6382a06-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "4636576a-d3da-4491-a146-a6ffe6382a06" (UID: "4636576a-d3da-4491-a146-a6ffe6382a06"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:14:42 crc kubenswrapper[4869]: I1001 15:14:42.453443 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4636576a-d3da-4491-a146-a6ffe6382a06-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "4636576a-d3da-4491-a146-a6ffe6382a06" (UID: "4636576a-d3da-4491-a146-a6ffe6382a06"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:14:42 crc kubenswrapper[4869]: I1001 15:14:42.453536 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4636576a-d3da-4491-a146-a6ffe6382a06-kube-api-access-cvl5m" (OuterVolumeSpecName: "kube-api-access-cvl5m") pod "4636576a-d3da-4491-a146-a6ffe6382a06" (UID: "4636576a-d3da-4491-a146-a6ffe6382a06"). InnerVolumeSpecName "kube-api-access-cvl5m". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:14:42 crc kubenswrapper[4869]: I1001 15:14:42.549443 4869 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/4636576a-d3da-4491-a146-a6ffe6382a06-console-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 01 15:14:42 crc kubenswrapper[4869]: I1001 15:14:42.549506 4869 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/4636576a-d3da-4491-a146-a6ffe6382a06-console-oauth-config\") on node \"crc\" DevicePath \"\"" Oct 01 15:14:42 crc kubenswrapper[4869]: I1001 15:14:42.549526 4869 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/4636576a-d3da-4491-a146-a6ffe6382a06-console-config\") on node \"crc\" DevicePath \"\"" Oct 01 15:14:42 crc kubenswrapper[4869]: I1001 15:14:42.549582 4869 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4636576a-d3da-4491-a146-a6ffe6382a06-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 15:14:42 crc kubenswrapper[4869]: I1001 15:14:42.549603 4869 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/4636576a-d3da-4491-a146-a6ffe6382a06-service-ca\") on node \"crc\" DevicePath \"\"" Oct 01 15:14:42 crc kubenswrapper[4869]: I1001 15:14:42.549622 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cvl5m\" (UniqueName: \"kubernetes.io/projected/4636576a-d3da-4491-a146-a6ffe6382a06-kube-api-access-cvl5m\") on node \"crc\" DevicePath \"\"" Oct 01 15:14:42 crc kubenswrapper[4869]: I1001 15:14:42.549645 4869 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/4636576a-d3da-4491-a146-a6ffe6382a06-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 01 15:14:43 crc kubenswrapper[4869]: I1001 15:14:43.006273 4869 generic.go:334] "Generic (PLEG): container finished" podID="7e6aa79a-0f81-43b0-b0e7-61f08276a955" containerID="822c73b430f175cec205f6edada0022865d00815f4435b6fabb3e132abb58b52" exitCode=0 Oct 01 15:14:43 crc kubenswrapper[4869]: I1001 15:14:43.006335 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h5q5b" event={"ID":"7e6aa79a-0f81-43b0-b0e7-61f08276a955","Type":"ContainerDied","Data":"822c73b430f175cec205f6edada0022865d00815f4435b6fabb3e132abb58b52"} Oct 01 15:14:43 crc kubenswrapper[4869]: I1001 15:14:43.010777 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-jv4xs_4636576a-d3da-4491-a146-a6ffe6382a06/console/0.log" Oct 01 15:14:43 crc kubenswrapper[4869]: I1001 15:14:43.010840 4869 generic.go:334] "Generic (PLEG): container finished" podID="4636576a-d3da-4491-a146-a6ffe6382a06" containerID="0f194fc8bf95f700b6d1017a54eaa910d245f1c7aed3a567d4cb83421a1ac718" exitCode=2 Oct 01 15:14:43 crc kubenswrapper[4869]: I1001 15:14:43.010970 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-jv4xs" Oct 01 15:14:43 crc kubenswrapper[4869]: I1001 15:14:43.010988 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-jv4xs" event={"ID":"4636576a-d3da-4491-a146-a6ffe6382a06","Type":"ContainerDied","Data":"0f194fc8bf95f700b6d1017a54eaa910d245f1c7aed3a567d4cb83421a1ac718"} Oct 01 15:14:43 crc kubenswrapper[4869]: I1001 15:14:43.011025 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-jv4xs" event={"ID":"4636576a-d3da-4491-a146-a6ffe6382a06","Type":"ContainerDied","Data":"a776cce11b77c47670f2f274c2d6684e3a24e1354fe9cc6baba7884371fbbbfa"} Oct 01 15:14:43 crc kubenswrapper[4869]: I1001 15:14:43.011063 4869 scope.go:117] "RemoveContainer" containerID="0f194fc8bf95f700b6d1017a54eaa910d245f1c7aed3a567d4cb83421a1ac718" Oct 01 15:14:43 crc kubenswrapper[4869]: I1001 15:14:43.033983 4869 scope.go:117] "RemoveContainer" containerID="0f194fc8bf95f700b6d1017a54eaa910d245f1c7aed3a567d4cb83421a1ac718" Oct 01 15:14:43 crc kubenswrapper[4869]: E1001 15:14:43.034479 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0f194fc8bf95f700b6d1017a54eaa910d245f1c7aed3a567d4cb83421a1ac718\": container with ID starting with 0f194fc8bf95f700b6d1017a54eaa910d245f1c7aed3a567d4cb83421a1ac718 not found: ID does not exist" containerID="0f194fc8bf95f700b6d1017a54eaa910d245f1c7aed3a567d4cb83421a1ac718" Oct 01 15:14:43 crc kubenswrapper[4869]: I1001 15:14:43.034522 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0f194fc8bf95f700b6d1017a54eaa910d245f1c7aed3a567d4cb83421a1ac718"} err="failed to get container status \"0f194fc8bf95f700b6d1017a54eaa910d245f1c7aed3a567d4cb83421a1ac718\": rpc error: code = NotFound desc = could not find container \"0f194fc8bf95f700b6d1017a54eaa910d245f1c7aed3a567d4cb83421a1ac718\": container with ID starting with 0f194fc8bf95f700b6d1017a54eaa910d245f1c7aed3a567d4cb83421a1ac718 not found: ID does not exist" Oct 01 15:14:43 crc kubenswrapper[4869]: I1001 15:14:43.046118 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-jv4xs"] Oct 01 15:14:43 crc kubenswrapper[4869]: I1001 15:14:43.050862 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-jv4xs"] Oct 01 15:14:43 crc kubenswrapper[4869]: I1001 15:14:43.595463 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4636576a-d3da-4491-a146-a6ffe6382a06" path="/var/lib/kubelet/pods/4636576a-d3da-4491-a146-a6ffe6382a06/volumes" Oct 01 15:14:46 crc kubenswrapper[4869]: I1001 15:14:46.058071 4869 generic.go:334] "Generic (PLEG): container finished" podID="7e6aa79a-0f81-43b0-b0e7-61f08276a955" containerID="268863394d340b716e09c5c5bd88f98f14a93997e1d4f2bcea214cd11dfed946" exitCode=0 Oct 01 15:14:46 crc kubenswrapper[4869]: I1001 15:14:46.059450 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h5q5b" event={"ID":"7e6aa79a-0f81-43b0-b0e7-61f08276a955","Type":"ContainerDied","Data":"268863394d340b716e09c5c5bd88f98f14a93997e1d4f2bcea214cd11dfed946"} Oct 01 15:14:47 crc kubenswrapper[4869]: I1001 15:14:47.067910 4869 generic.go:334] "Generic (PLEG): container finished" podID="7e6aa79a-0f81-43b0-b0e7-61f08276a955" 
containerID="803fcd16b02f9b94cd32790988d983aeee53ecb318f1564507facedfb3dd65a4" exitCode=0 Oct 01 15:14:47 crc kubenswrapper[4869]: I1001 15:14:47.067976 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h5q5b" event={"ID":"7e6aa79a-0f81-43b0-b0e7-61f08276a955","Type":"ContainerDied","Data":"803fcd16b02f9b94cd32790988d983aeee53ecb318f1564507facedfb3dd65a4"} Oct 01 15:14:48 crc kubenswrapper[4869]: I1001 15:14:48.358578 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h5q5b" Oct 01 15:14:48 crc kubenswrapper[4869]: I1001 15:14:48.430046 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/7e6aa79a-0f81-43b0-b0e7-61f08276a955-util\") pod \"7e6aa79a-0f81-43b0-b0e7-61f08276a955\" (UID: \"7e6aa79a-0f81-43b0-b0e7-61f08276a955\") " Oct 01 15:14:48 crc kubenswrapper[4869]: I1001 15:14:48.430123 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/7e6aa79a-0f81-43b0-b0e7-61f08276a955-bundle\") pod \"7e6aa79a-0f81-43b0-b0e7-61f08276a955\" (UID: \"7e6aa79a-0f81-43b0-b0e7-61f08276a955\") " Oct 01 15:14:48 crc kubenswrapper[4869]: I1001 15:14:48.430186 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-667vx\" (UniqueName: \"kubernetes.io/projected/7e6aa79a-0f81-43b0-b0e7-61f08276a955-kube-api-access-667vx\") pod \"7e6aa79a-0f81-43b0-b0e7-61f08276a955\" (UID: \"7e6aa79a-0f81-43b0-b0e7-61f08276a955\") " Oct 01 15:14:48 crc kubenswrapper[4869]: I1001 15:14:48.432567 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7e6aa79a-0f81-43b0-b0e7-61f08276a955-bundle" (OuterVolumeSpecName: "bundle") pod "7e6aa79a-0f81-43b0-b0e7-61f08276a955" (UID: "7e6aa79a-0f81-43b0-b0e7-61f08276a955"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 15:14:48 crc kubenswrapper[4869]: I1001 15:14:48.435554 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7e6aa79a-0f81-43b0-b0e7-61f08276a955-kube-api-access-667vx" (OuterVolumeSpecName: "kube-api-access-667vx") pod "7e6aa79a-0f81-43b0-b0e7-61f08276a955" (UID: "7e6aa79a-0f81-43b0-b0e7-61f08276a955"). InnerVolumeSpecName "kube-api-access-667vx". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:14:48 crc kubenswrapper[4869]: I1001 15:14:48.439745 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7e6aa79a-0f81-43b0-b0e7-61f08276a955-util" (OuterVolumeSpecName: "util") pod "7e6aa79a-0f81-43b0-b0e7-61f08276a955" (UID: "7e6aa79a-0f81-43b0-b0e7-61f08276a955"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 15:14:48 crc kubenswrapper[4869]: I1001 15:14:48.531551 4869 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/7e6aa79a-0f81-43b0-b0e7-61f08276a955-util\") on node \"crc\" DevicePath \"\"" Oct 01 15:14:48 crc kubenswrapper[4869]: I1001 15:14:48.531581 4869 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/7e6aa79a-0f81-43b0-b0e7-61f08276a955-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 15:14:48 crc kubenswrapper[4869]: I1001 15:14:48.531591 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-667vx\" (UniqueName: \"kubernetes.io/projected/7e6aa79a-0f81-43b0-b0e7-61f08276a955-kube-api-access-667vx\") on node \"crc\" DevicePath \"\"" Oct 01 15:14:49 crc kubenswrapper[4869]: I1001 15:14:49.083819 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h5q5b" event={"ID":"7e6aa79a-0f81-43b0-b0e7-61f08276a955","Type":"ContainerDied","Data":"e9ba84a8bc2593ef70434b9a2d3e4370be812a32dc1eeb71a1800f8997b78b4d"} Oct 01 15:14:49 crc kubenswrapper[4869]: I1001 15:14:49.083874 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e9ba84a8bc2593ef70434b9a2d3e4370be812a32dc1eeb71a1800f8997b78b4d" Oct 01 15:14:49 crc kubenswrapper[4869]: I1001 15:14:49.083955 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h5q5b" Oct 01 15:14:59 crc kubenswrapper[4869]: I1001 15:14:59.538649 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-56886c7897-6kmth"] Oct 01 15:14:59 crc kubenswrapper[4869]: E1001 15:14:59.539313 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7e6aa79a-0f81-43b0-b0e7-61f08276a955" containerName="pull" Oct 01 15:14:59 crc kubenswrapper[4869]: I1001 15:14:59.539325 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="7e6aa79a-0f81-43b0-b0e7-61f08276a955" containerName="pull" Oct 01 15:14:59 crc kubenswrapper[4869]: E1001 15:14:59.539337 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7e6aa79a-0f81-43b0-b0e7-61f08276a955" containerName="util" Oct 01 15:14:59 crc kubenswrapper[4869]: I1001 15:14:59.539342 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="7e6aa79a-0f81-43b0-b0e7-61f08276a955" containerName="util" Oct 01 15:14:59 crc kubenswrapper[4869]: E1001 15:14:59.539353 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7e6aa79a-0f81-43b0-b0e7-61f08276a955" containerName="extract" Oct 01 15:14:59 crc kubenswrapper[4869]: I1001 15:14:59.539360 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="7e6aa79a-0f81-43b0-b0e7-61f08276a955" containerName="extract" Oct 01 15:14:59 crc kubenswrapper[4869]: E1001 15:14:59.539370 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4636576a-d3da-4491-a146-a6ffe6382a06" containerName="console" Oct 01 15:14:59 crc kubenswrapper[4869]: I1001 15:14:59.539375 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="4636576a-d3da-4491-a146-a6ffe6382a06" containerName="console" Oct 01 15:14:59 crc kubenswrapper[4869]: I1001 15:14:59.539460 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="4636576a-d3da-4491-a146-a6ffe6382a06" containerName="console" Oct 
01 15:14:59 crc kubenswrapper[4869]: I1001 15:14:59.539475 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="7e6aa79a-0f81-43b0-b0e7-61f08276a955" containerName="extract" Oct 01 15:14:59 crc kubenswrapper[4869]: I1001 15:14:59.539831 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-56886c7897-6kmth" Oct 01 15:14:59 crc kubenswrapper[4869]: I1001 15:14:59.542783 4869 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Oct 01 15:14:59 crc kubenswrapper[4869]: I1001 15:14:59.543239 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Oct 01 15:14:59 crc kubenswrapper[4869]: I1001 15:14:59.543509 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Oct 01 15:14:59 crc kubenswrapper[4869]: I1001 15:14:59.543665 4869 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Oct 01 15:14:59 crc kubenswrapper[4869]: I1001 15:14:59.544434 4869 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-8gppr" Oct 01 15:14:59 crc kubenswrapper[4869]: I1001 15:14:59.555895 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-56886c7897-6kmth"] Oct 01 15:14:59 crc kubenswrapper[4869]: I1001 15:14:59.687806 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/2ad6ecdc-44a8-4c62-89e8-ca70878847a5-webhook-cert\") pod \"metallb-operator-controller-manager-56886c7897-6kmth\" (UID: \"2ad6ecdc-44a8-4c62-89e8-ca70878847a5\") " pod="metallb-system/metallb-operator-controller-manager-56886c7897-6kmth" Oct 01 15:14:59 crc kubenswrapper[4869]: I1001 15:14:59.687915 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h6lgt\" (UniqueName: \"kubernetes.io/projected/2ad6ecdc-44a8-4c62-89e8-ca70878847a5-kube-api-access-h6lgt\") pod \"metallb-operator-controller-manager-56886c7897-6kmth\" (UID: \"2ad6ecdc-44a8-4c62-89e8-ca70878847a5\") " pod="metallb-system/metallb-operator-controller-manager-56886c7897-6kmth" Oct 01 15:14:59 crc kubenswrapper[4869]: I1001 15:14:59.688449 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/2ad6ecdc-44a8-4c62-89e8-ca70878847a5-apiservice-cert\") pod \"metallb-operator-controller-manager-56886c7897-6kmth\" (UID: \"2ad6ecdc-44a8-4c62-89e8-ca70878847a5\") " pod="metallb-system/metallb-operator-controller-manager-56886c7897-6kmth" Oct 01 15:14:59 crc kubenswrapper[4869]: I1001 15:14:59.775900 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-96958c6f-4zrjt"] Oct 01 15:14:59 crc kubenswrapper[4869]: I1001 15:14:59.776848 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-96958c6f-4zrjt" Oct 01 15:14:59 crc kubenswrapper[4869]: I1001 15:14:59.779220 4869 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Oct 01 15:14:59 crc kubenswrapper[4869]: I1001 15:14:59.779510 4869 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Oct 01 15:14:59 crc kubenswrapper[4869]: I1001 15:14:59.779545 4869 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-xnbmk" Oct 01 15:14:59 crc kubenswrapper[4869]: I1001 15:14:59.789783 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/2ad6ecdc-44a8-4c62-89e8-ca70878847a5-apiservice-cert\") pod \"metallb-operator-controller-manager-56886c7897-6kmth\" (UID: \"2ad6ecdc-44a8-4c62-89e8-ca70878847a5\") " pod="metallb-system/metallb-operator-controller-manager-56886c7897-6kmth" Oct 01 15:14:59 crc kubenswrapper[4869]: I1001 15:14:59.789885 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/2ad6ecdc-44a8-4c62-89e8-ca70878847a5-webhook-cert\") pod \"metallb-operator-controller-manager-56886c7897-6kmth\" (UID: \"2ad6ecdc-44a8-4c62-89e8-ca70878847a5\") " pod="metallb-system/metallb-operator-controller-manager-56886c7897-6kmth" Oct 01 15:14:59 crc kubenswrapper[4869]: I1001 15:14:59.789956 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h6lgt\" (UniqueName: \"kubernetes.io/projected/2ad6ecdc-44a8-4c62-89e8-ca70878847a5-kube-api-access-h6lgt\") pod \"metallb-operator-controller-manager-56886c7897-6kmth\" (UID: \"2ad6ecdc-44a8-4c62-89e8-ca70878847a5\") " pod="metallb-system/metallb-operator-controller-manager-56886c7897-6kmth" Oct 01 15:14:59 crc kubenswrapper[4869]: I1001 15:14:59.794391 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-96958c6f-4zrjt"] Oct 01 15:14:59 crc kubenswrapper[4869]: I1001 15:14:59.795195 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/2ad6ecdc-44a8-4c62-89e8-ca70878847a5-webhook-cert\") pod \"metallb-operator-controller-manager-56886c7897-6kmth\" (UID: \"2ad6ecdc-44a8-4c62-89e8-ca70878847a5\") " pod="metallb-system/metallb-operator-controller-manager-56886c7897-6kmth" Oct 01 15:14:59 crc kubenswrapper[4869]: I1001 15:14:59.808513 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/2ad6ecdc-44a8-4c62-89e8-ca70878847a5-apiservice-cert\") pod \"metallb-operator-controller-manager-56886c7897-6kmth\" (UID: \"2ad6ecdc-44a8-4c62-89e8-ca70878847a5\") " pod="metallb-system/metallb-operator-controller-manager-56886c7897-6kmth" Oct 01 15:14:59 crc kubenswrapper[4869]: I1001 15:14:59.831020 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h6lgt\" (UniqueName: \"kubernetes.io/projected/2ad6ecdc-44a8-4c62-89e8-ca70878847a5-kube-api-access-h6lgt\") pod \"metallb-operator-controller-manager-56886c7897-6kmth\" (UID: \"2ad6ecdc-44a8-4c62-89e8-ca70878847a5\") " pod="metallb-system/metallb-operator-controller-manager-56886c7897-6kmth" Oct 01 15:14:59 crc kubenswrapper[4869]: I1001 15:14:59.856582 4869 util.go:30] "No sandbox 
for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-56886c7897-6kmth" Oct 01 15:14:59 crc kubenswrapper[4869]: I1001 15:14:59.891881 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2rg6w\" (UniqueName: \"kubernetes.io/projected/112154b6-526a-4e35-b3de-f3b95835eb03-kube-api-access-2rg6w\") pod \"metallb-operator-webhook-server-96958c6f-4zrjt\" (UID: \"112154b6-526a-4e35-b3de-f3b95835eb03\") " pod="metallb-system/metallb-operator-webhook-server-96958c6f-4zrjt" Oct 01 15:14:59 crc kubenswrapper[4869]: I1001 15:14:59.891945 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/112154b6-526a-4e35-b3de-f3b95835eb03-apiservice-cert\") pod \"metallb-operator-webhook-server-96958c6f-4zrjt\" (UID: \"112154b6-526a-4e35-b3de-f3b95835eb03\") " pod="metallb-system/metallb-operator-webhook-server-96958c6f-4zrjt" Oct 01 15:14:59 crc kubenswrapper[4869]: I1001 15:14:59.891984 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/112154b6-526a-4e35-b3de-f3b95835eb03-webhook-cert\") pod \"metallb-operator-webhook-server-96958c6f-4zrjt\" (UID: \"112154b6-526a-4e35-b3de-f3b95835eb03\") " pod="metallb-system/metallb-operator-webhook-server-96958c6f-4zrjt" Oct 01 15:14:59 crc kubenswrapper[4869]: I1001 15:14:59.996348 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2rg6w\" (UniqueName: \"kubernetes.io/projected/112154b6-526a-4e35-b3de-f3b95835eb03-kube-api-access-2rg6w\") pod \"metallb-operator-webhook-server-96958c6f-4zrjt\" (UID: \"112154b6-526a-4e35-b3de-f3b95835eb03\") " pod="metallb-system/metallb-operator-webhook-server-96958c6f-4zrjt" Oct 01 15:14:59 crc kubenswrapper[4869]: I1001 15:14:59.996803 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/112154b6-526a-4e35-b3de-f3b95835eb03-apiservice-cert\") pod \"metallb-operator-webhook-server-96958c6f-4zrjt\" (UID: \"112154b6-526a-4e35-b3de-f3b95835eb03\") " pod="metallb-system/metallb-operator-webhook-server-96958c6f-4zrjt" Oct 01 15:14:59 crc kubenswrapper[4869]: I1001 15:14:59.996862 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/112154b6-526a-4e35-b3de-f3b95835eb03-webhook-cert\") pod \"metallb-operator-webhook-server-96958c6f-4zrjt\" (UID: \"112154b6-526a-4e35-b3de-f3b95835eb03\") " pod="metallb-system/metallb-operator-webhook-server-96958c6f-4zrjt" Oct 01 15:15:00 crc kubenswrapper[4869]: I1001 15:15:00.002702 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/112154b6-526a-4e35-b3de-f3b95835eb03-webhook-cert\") pod \"metallb-operator-webhook-server-96958c6f-4zrjt\" (UID: \"112154b6-526a-4e35-b3de-f3b95835eb03\") " pod="metallb-system/metallb-operator-webhook-server-96958c6f-4zrjt" Oct 01 15:15:00 crc kubenswrapper[4869]: I1001 15:15:00.002935 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/112154b6-526a-4e35-b3de-f3b95835eb03-apiservice-cert\") pod \"metallb-operator-webhook-server-96958c6f-4zrjt\" (UID: \"112154b6-526a-4e35-b3de-f3b95835eb03\") " 
pod="metallb-system/metallb-operator-webhook-server-96958c6f-4zrjt" Oct 01 15:15:00 crc kubenswrapper[4869]: I1001 15:15:00.011050 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2rg6w\" (UniqueName: \"kubernetes.io/projected/112154b6-526a-4e35-b3de-f3b95835eb03-kube-api-access-2rg6w\") pod \"metallb-operator-webhook-server-96958c6f-4zrjt\" (UID: \"112154b6-526a-4e35-b3de-f3b95835eb03\") " pod="metallb-system/metallb-operator-webhook-server-96958c6f-4zrjt" Oct 01 15:15:00 crc kubenswrapper[4869]: I1001 15:15:00.062232 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-56886c7897-6kmth"] Oct 01 15:15:00 crc kubenswrapper[4869]: I1001 15:15:00.092005 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-96958c6f-4zrjt" Oct 01 15:15:00 crc kubenswrapper[4869]: I1001 15:15:00.143407 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29322195-8k696"] Oct 01 15:15:00 crc kubenswrapper[4869]: I1001 15:15:00.144393 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29322195-8k696" Oct 01 15:15:00 crc kubenswrapper[4869]: I1001 15:15:00.146455 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 01 15:15:00 crc kubenswrapper[4869]: I1001 15:15:00.148058 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 01 15:15:00 crc kubenswrapper[4869]: I1001 15:15:00.171303 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29322195-8k696"] Oct 01 15:15:00 crc kubenswrapper[4869]: I1001 15:15:00.173631 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-56886c7897-6kmth" event={"ID":"2ad6ecdc-44a8-4c62-89e8-ca70878847a5","Type":"ContainerStarted","Data":"310acf8bf9d554fa38cad5b7adeb6f587de06f3d3f8a9479056f026e75a6f2e7"} Oct 01 15:15:00 crc kubenswrapper[4869]: I1001 15:15:00.305652 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7b44ff70-e201-4423-bf46-ec7066786edc-secret-volume\") pod \"collect-profiles-29322195-8k696\" (UID: \"7b44ff70-e201-4423-bf46-ec7066786edc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322195-8k696" Oct 01 15:15:00 crc kubenswrapper[4869]: I1001 15:15:00.305960 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7b44ff70-e201-4423-bf46-ec7066786edc-config-volume\") pod \"collect-profiles-29322195-8k696\" (UID: \"7b44ff70-e201-4423-bf46-ec7066786edc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322195-8k696" Oct 01 15:15:00 crc kubenswrapper[4869]: I1001 15:15:00.305989 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4fgtj\" (UniqueName: \"kubernetes.io/projected/7b44ff70-e201-4423-bf46-ec7066786edc-kube-api-access-4fgtj\") pod \"collect-profiles-29322195-8k696\" (UID: \"7b44ff70-e201-4423-bf46-ec7066786edc\") " 
pod="openshift-operator-lifecycle-manager/collect-profiles-29322195-8k696" Oct 01 15:15:00 crc kubenswrapper[4869]: I1001 15:15:00.351076 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-96958c6f-4zrjt"] Oct 01 15:15:00 crc kubenswrapper[4869]: I1001 15:15:00.407845 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7b44ff70-e201-4423-bf46-ec7066786edc-secret-volume\") pod \"collect-profiles-29322195-8k696\" (UID: \"7b44ff70-e201-4423-bf46-ec7066786edc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322195-8k696" Oct 01 15:15:00 crc kubenswrapper[4869]: I1001 15:15:00.407931 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4fgtj\" (UniqueName: \"kubernetes.io/projected/7b44ff70-e201-4423-bf46-ec7066786edc-kube-api-access-4fgtj\") pod \"collect-profiles-29322195-8k696\" (UID: \"7b44ff70-e201-4423-bf46-ec7066786edc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322195-8k696" Oct 01 15:15:00 crc kubenswrapper[4869]: I1001 15:15:00.407966 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7b44ff70-e201-4423-bf46-ec7066786edc-config-volume\") pod \"collect-profiles-29322195-8k696\" (UID: \"7b44ff70-e201-4423-bf46-ec7066786edc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322195-8k696" Oct 01 15:15:00 crc kubenswrapper[4869]: I1001 15:15:00.409391 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7b44ff70-e201-4423-bf46-ec7066786edc-config-volume\") pod \"collect-profiles-29322195-8k696\" (UID: \"7b44ff70-e201-4423-bf46-ec7066786edc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322195-8k696" Oct 01 15:15:00 crc kubenswrapper[4869]: I1001 15:15:00.415822 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7b44ff70-e201-4423-bf46-ec7066786edc-secret-volume\") pod \"collect-profiles-29322195-8k696\" (UID: \"7b44ff70-e201-4423-bf46-ec7066786edc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322195-8k696" Oct 01 15:15:00 crc kubenswrapper[4869]: I1001 15:15:00.427859 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4fgtj\" (UniqueName: \"kubernetes.io/projected/7b44ff70-e201-4423-bf46-ec7066786edc-kube-api-access-4fgtj\") pod \"collect-profiles-29322195-8k696\" (UID: \"7b44ff70-e201-4423-bf46-ec7066786edc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322195-8k696" Oct 01 15:15:00 crc kubenswrapper[4869]: I1001 15:15:00.473790 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29322195-8k696" Oct 01 15:15:00 crc kubenswrapper[4869]: I1001 15:15:00.660223 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29322195-8k696"] Oct 01 15:15:01 crc kubenswrapper[4869]: I1001 15:15:01.182076 4869 generic.go:334] "Generic (PLEG): container finished" podID="7b44ff70-e201-4423-bf46-ec7066786edc" containerID="09144d984105f0751c9baf4d4ac22b51fcf226e93d477c26c4e3cc76614119d8" exitCode=0 Oct 01 15:15:01 crc kubenswrapper[4869]: I1001 15:15:01.182203 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29322195-8k696" event={"ID":"7b44ff70-e201-4423-bf46-ec7066786edc","Type":"ContainerDied","Data":"09144d984105f0751c9baf4d4ac22b51fcf226e93d477c26c4e3cc76614119d8"} Oct 01 15:15:01 crc kubenswrapper[4869]: I1001 15:15:01.182491 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29322195-8k696" event={"ID":"7b44ff70-e201-4423-bf46-ec7066786edc","Type":"ContainerStarted","Data":"a89780a73b725c7bc33c4b4e5c9fba97da7476d93a7ed1fb28b49c1092fb9b92"} Oct 01 15:15:01 crc kubenswrapper[4869]: I1001 15:15:01.184307 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-96958c6f-4zrjt" event={"ID":"112154b6-526a-4e35-b3de-f3b95835eb03","Type":"ContainerStarted","Data":"64db8db3933ef37c284ed41d8c16de127e7fad792e842bbebf7a3f1638ef03e7"} Oct 01 15:15:02 crc kubenswrapper[4869]: I1001 15:15:02.487669 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29322195-8k696" Oct 01 15:15:02 crc kubenswrapper[4869]: I1001 15:15:02.640775 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4fgtj\" (UniqueName: \"kubernetes.io/projected/7b44ff70-e201-4423-bf46-ec7066786edc-kube-api-access-4fgtj\") pod \"7b44ff70-e201-4423-bf46-ec7066786edc\" (UID: \"7b44ff70-e201-4423-bf46-ec7066786edc\") " Oct 01 15:15:02 crc kubenswrapper[4869]: I1001 15:15:02.640921 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7b44ff70-e201-4423-bf46-ec7066786edc-secret-volume\") pod \"7b44ff70-e201-4423-bf46-ec7066786edc\" (UID: \"7b44ff70-e201-4423-bf46-ec7066786edc\") " Oct 01 15:15:02 crc kubenswrapper[4869]: I1001 15:15:02.641077 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7b44ff70-e201-4423-bf46-ec7066786edc-config-volume\") pod \"7b44ff70-e201-4423-bf46-ec7066786edc\" (UID: \"7b44ff70-e201-4423-bf46-ec7066786edc\") " Oct 01 15:15:02 crc kubenswrapper[4869]: I1001 15:15:02.642144 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7b44ff70-e201-4423-bf46-ec7066786edc-config-volume" (OuterVolumeSpecName: "config-volume") pod "7b44ff70-e201-4423-bf46-ec7066786edc" (UID: "7b44ff70-e201-4423-bf46-ec7066786edc"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:15:02 crc kubenswrapper[4869]: I1001 15:15:02.655876 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7b44ff70-e201-4423-bf46-ec7066786edc-kube-api-access-4fgtj" (OuterVolumeSpecName: "kube-api-access-4fgtj") pod "7b44ff70-e201-4423-bf46-ec7066786edc" (UID: "7b44ff70-e201-4423-bf46-ec7066786edc"). InnerVolumeSpecName "kube-api-access-4fgtj". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:15:02 crc kubenswrapper[4869]: I1001 15:15:02.659612 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7b44ff70-e201-4423-bf46-ec7066786edc-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "7b44ff70-e201-4423-bf46-ec7066786edc" (UID: "7b44ff70-e201-4423-bf46-ec7066786edc"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:15:02 crc kubenswrapper[4869]: I1001 15:15:02.743015 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4fgtj\" (UniqueName: \"kubernetes.io/projected/7b44ff70-e201-4423-bf46-ec7066786edc-kube-api-access-4fgtj\") on node \"crc\" DevicePath \"\"" Oct 01 15:15:02 crc kubenswrapper[4869]: I1001 15:15:02.743052 4869 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7b44ff70-e201-4423-bf46-ec7066786edc-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 01 15:15:02 crc kubenswrapper[4869]: I1001 15:15:02.743062 4869 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7b44ff70-e201-4423-bf46-ec7066786edc-config-volume\") on node \"crc\" DevicePath \"\"" Oct 01 15:15:03 crc kubenswrapper[4869]: I1001 15:15:03.197493 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29322195-8k696" event={"ID":"7b44ff70-e201-4423-bf46-ec7066786edc","Type":"ContainerDied","Data":"a89780a73b725c7bc33c4b4e5c9fba97da7476d93a7ed1fb28b49c1092fb9b92"} Oct 01 15:15:03 crc kubenswrapper[4869]: I1001 15:15:03.197528 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a89780a73b725c7bc33c4b4e5c9fba97da7476d93a7ed1fb28b49c1092fb9b92" Oct 01 15:15:03 crc kubenswrapper[4869]: I1001 15:15:03.197609 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29322195-8k696" Oct 01 15:15:04 crc kubenswrapper[4869]: I1001 15:15:04.207405 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-56886c7897-6kmth" event={"ID":"2ad6ecdc-44a8-4c62-89e8-ca70878847a5","Type":"ContainerStarted","Data":"bf7560e390e8e04e0ca810a1f6f18104760366687a62624bb10bbb11f1b4ea4c"} Oct 01 15:15:04 crc kubenswrapper[4869]: I1001 15:15:04.207818 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-56886c7897-6kmth" Oct 01 15:15:04 crc kubenswrapper[4869]: I1001 15:15:04.229413 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-56886c7897-6kmth" podStartSLOduration=1.68070093 podStartE2EDuration="5.229388573s" podCreationTimestamp="2025-10-01 15:14:59 +0000 UTC" firstStartedPulling="2025-10-01 15:15:00.082369856 +0000 UTC m=+609.229212972" lastFinishedPulling="2025-10-01 15:15:03.631057489 +0000 UTC m=+612.777900615" observedRunningTime="2025-10-01 15:15:04.226606583 +0000 UTC m=+613.373449709" watchObservedRunningTime="2025-10-01 15:15:04.229388573 +0000 UTC m=+613.376231689" Oct 01 15:15:06 crc kubenswrapper[4869]: I1001 15:15:06.220096 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-96958c6f-4zrjt" event={"ID":"112154b6-526a-4e35-b3de-f3b95835eb03","Type":"ContainerStarted","Data":"23c4c66704dd1afe5db5d785eedbff6303b0bddf5a7ebf7326d38bcf84adbf5a"} Oct 01 15:15:06 crc kubenswrapper[4869]: I1001 15:15:06.220493 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-96958c6f-4zrjt" Oct 01 15:15:06 crc kubenswrapper[4869]: I1001 15:15:06.276870 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-96958c6f-4zrjt" podStartSLOduration=2.215592558 podStartE2EDuration="7.276854147s" podCreationTimestamp="2025-10-01 15:14:59 +0000 UTC" firstStartedPulling="2025-10-01 15:15:00.370944316 +0000 UTC m=+609.517787432" lastFinishedPulling="2025-10-01 15:15:05.432205905 +0000 UTC m=+614.579049021" observedRunningTime="2025-10-01 15:15:06.276081738 +0000 UTC m=+615.422924854" watchObservedRunningTime="2025-10-01 15:15:06.276854147 +0000 UTC m=+615.423697263" Oct 01 15:15:20 crc kubenswrapper[4869]: I1001 15:15:20.097026 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-96958c6f-4zrjt" Oct 01 15:15:33 crc kubenswrapper[4869]: I1001 15:15:33.099101 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-s8l4m"] Oct 01 15:15:33 crc kubenswrapper[4869]: I1001 15:15:33.100000 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-s8l4m" podUID="8367e54f-3b20-4903-8dcf-d3ae02e516ca" containerName="controller-manager" containerID="cri-o://edf3d3805b9c8aa57b9b4995817df24bc06cefbe5fda9f79542d1715fb7612fb" gracePeriod=30 Oct 01 15:15:33 crc kubenswrapper[4869]: I1001 15:15:33.173428 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-5zhxq"] Oct 01 15:15:33 crc kubenswrapper[4869]: I1001 15:15:33.173670 4869 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5zhxq" podUID="3635d59c-a52d-465d-8402-80153cd7369b" containerName="route-controller-manager" containerID="cri-o://940ffe1670ca371e81078c19ecec12f7298b1df088ba46d33bb5c4b46427acd5" gracePeriod=30 Oct 01 15:15:33 crc kubenswrapper[4869]: I1001 15:15:33.402406 4869 generic.go:334] "Generic (PLEG): container finished" podID="3635d59c-a52d-465d-8402-80153cd7369b" containerID="940ffe1670ca371e81078c19ecec12f7298b1df088ba46d33bb5c4b46427acd5" exitCode=0 Oct 01 15:15:33 crc kubenswrapper[4869]: I1001 15:15:33.402847 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5zhxq" event={"ID":"3635d59c-a52d-465d-8402-80153cd7369b","Type":"ContainerDied","Data":"940ffe1670ca371e81078c19ecec12f7298b1df088ba46d33bb5c4b46427acd5"} Oct 01 15:15:33 crc kubenswrapper[4869]: I1001 15:15:33.405010 4869 generic.go:334] "Generic (PLEG): container finished" podID="8367e54f-3b20-4903-8dcf-d3ae02e516ca" containerID="edf3d3805b9c8aa57b9b4995817df24bc06cefbe5fda9f79542d1715fb7612fb" exitCode=0 Oct 01 15:15:33 crc kubenswrapper[4869]: I1001 15:15:33.405075 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-s8l4m" event={"ID":"8367e54f-3b20-4903-8dcf-d3ae02e516ca","Type":"ContainerDied","Data":"edf3d3805b9c8aa57b9b4995817df24bc06cefbe5fda9f79542d1715fb7612fb"} Oct 01 15:15:33 crc kubenswrapper[4869]: I1001 15:15:33.481754 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-s8l4m" Oct 01 15:15:33 crc kubenswrapper[4869]: I1001 15:15:33.535123 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5zhxq" Oct 01 15:15:33 crc kubenswrapper[4869]: I1001 15:15:33.563137 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8367e54f-3b20-4903-8dcf-d3ae02e516ca-client-ca\") pod \"8367e54f-3b20-4903-8dcf-d3ae02e516ca\" (UID: \"8367e54f-3b20-4903-8dcf-d3ae02e516ca\") " Oct 01 15:15:33 crc kubenswrapper[4869]: I1001 15:15:33.563295 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8367e54f-3b20-4903-8dcf-d3ae02e516ca-config\") pod \"8367e54f-3b20-4903-8dcf-d3ae02e516ca\" (UID: \"8367e54f-3b20-4903-8dcf-d3ae02e516ca\") " Oct 01 15:15:33 crc kubenswrapper[4869]: I1001 15:15:33.563326 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6679m\" (UniqueName: \"kubernetes.io/projected/8367e54f-3b20-4903-8dcf-d3ae02e516ca-kube-api-access-6679m\") pod \"8367e54f-3b20-4903-8dcf-d3ae02e516ca\" (UID: \"8367e54f-3b20-4903-8dcf-d3ae02e516ca\") " Oct 01 15:15:33 crc kubenswrapper[4869]: I1001 15:15:33.563363 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8367e54f-3b20-4903-8dcf-d3ae02e516ca-proxy-ca-bundles\") pod \"8367e54f-3b20-4903-8dcf-d3ae02e516ca\" (UID: \"8367e54f-3b20-4903-8dcf-d3ae02e516ca\") " Oct 01 15:15:33 crc kubenswrapper[4869]: I1001 15:15:33.563389 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8367e54f-3b20-4903-8dcf-d3ae02e516ca-serving-cert\") pod \"8367e54f-3b20-4903-8dcf-d3ae02e516ca\" (UID: \"8367e54f-3b20-4903-8dcf-d3ae02e516ca\") " Oct 01 15:15:33 crc kubenswrapper[4869]: I1001 15:15:33.568140 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8367e54f-3b20-4903-8dcf-d3ae02e516ca-config" (OuterVolumeSpecName: "config") pod "8367e54f-3b20-4903-8dcf-d3ae02e516ca" (UID: "8367e54f-3b20-4903-8dcf-d3ae02e516ca"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:15:33 crc kubenswrapper[4869]: I1001 15:15:33.568609 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8367e54f-3b20-4903-8dcf-d3ae02e516ca-client-ca" (OuterVolumeSpecName: "client-ca") pod "8367e54f-3b20-4903-8dcf-d3ae02e516ca" (UID: "8367e54f-3b20-4903-8dcf-d3ae02e516ca"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:15:33 crc kubenswrapper[4869]: I1001 15:15:33.568980 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8367e54f-3b20-4903-8dcf-d3ae02e516ca-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "8367e54f-3b20-4903-8dcf-d3ae02e516ca" (UID: "8367e54f-3b20-4903-8dcf-d3ae02e516ca"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:15:33 crc kubenswrapper[4869]: I1001 15:15:33.572686 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8367e54f-3b20-4903-8dcf-d3ae02e516ca-kube-api-access-6679m" (OuterVolumeSpecName: "kube-api-access-6679m") pod "8367e54f-3b20-4903-8dcf-d3ae02e516ca" (UID: "8367e54f-3b20-4903-8dcf-d3ae02e516ca"). 
InnerVolumeSpecName "kube-api-access-6679m". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:15:33 crc kubenswrapper[4869]: I1001 15:15:33.572761 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8367e54f-3b20-4903-8dcf-d3ae02e516ca-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8367e54f-3b20-4903-8dcf-d3ae02e516ca" (UID: "8367e54f-3b20-4903-8dcf-d3ae02e516ca"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:15:33 crc kubenswrapper[4869]: I1001 15:15:33.664586 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rc5nb\" (UniqueName: \"kubernetes.io/projected/3635d59c-a52d-465d-8402-80153cd7369b-kube-api-access-rc5nb\") pod \"3635d59c-a52d-465d-8402-80153cd7369b\" (UID: \"3635d59c-a52d-465d-8402-80153cd7369b\") " Oct 01 15:15:33 crc kubenswrapper[4869]: I1001 15:15:33.664644 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3635d59c-a52d-465d-8402-80153cd7369b-serving-cert\") pod \"3635d59c-a52d-465d-8402-80153cd7369b\" (UID: \"3635d59c-a52d-465d-8402-80153cd7369b\") " Oct 01 15:15:33 crc kubenswrapper[4869]: I1001 15:15:33.664721 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3635d59c-a52d-465d-8402-80153cd7369b-config\") pod \"3635d59c-a52d-465d-8402-80153cd7369b\" (UID: \"3635d59c-a52d-465d-8402-80153cd7369b\") " Oct 01 15:15:33 crc kubenswrapper[4869]: I1001 15:15:33.664781 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3635d59c-a52d-465d-8402-80153cd7369b-client-ca\") pod \"3635d59c-a52d-465d-8402-80153cd7369b\" (UID: \"3635d59c-a52d-465d-8402-80153cd7369b\") " Oct 01 15:15:33 crc kubenswrapper[4869]: I1001 15:15:33.665581 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3635d59c-a52d-465d-8402-80153cd7369b-client-ca" (OuterVolumeSpecName: "client-ca") pod "3635d59c-a52d-465d-8402-80153cd7369b" (UID: "3635d59c-a52d-465d-8402-80153cd7369b"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:15:33 crc kubenswrapper[4869]: I1001 15:15:33.665680 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3635d59c-a52d-465d-8402-80153cd7369b-config" (OuterVolumeSpecName: "config") pod "3635d59c-a52d-465d-8402-80153cd7369b" (UID: "3635d59c-a52d-465d-8402-80153cd7369b"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:15:33 crc kubenswrapper[4869]: I1001 15:15:33.665928 4869 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8367e54f-3b20-4903-8dcf-d3ae02e516ca-config\") on node \"crc\" DevicePath \"\"" Oct 01 15:15:33 crc kubenswrapper[4869]: I1001 15:15:33.665949 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6679m\" (UniqueName: \"kubernetes.io/projected/8367e54f-3b20-4903-8dcf-d3ae02e516ca-kube-api-access-6679m\") on node \"crc\" DevicePath \"\"" Oct 01 15:15:33 crc kubenswrapper[4869]: I1001 15:15:33.665959 4869 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8367e54f-3b20-4903-8dcf-d3ae02e516ca-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 01 15:15:33 crc kubenswrapper[4869]: I1001 15:15:33.665968 4869 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8367e54f-3b20-4903-8dcf-d3ae02e516ca-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 01 15:15:33 crc kubenswrapper[4869]: I1001 15:15:33.665978 4869 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8367e54f-3b20-4903-8dcf-d3ae02e516ca-client-ca\") on node \"crc\" DevicePath \"\"" Oct 01 15:15:33 crc kubenswrapper[4869]: I1001 15:15:33.665986 4869 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3635d59c-a52d-465d-8402-80153cd7369b-client-ca\") on node \"crc\" DevicePath \"\"" Oct 01 15:15:33 crc kubenswrapper[4869]: I1001 15:15:33.668515 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3635d59c-a52d-465d-8402-80153cd7369b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "3635d59c-a52d-465d-8402-80153cd7369b" (UID: "3635d59c-a52d-465d-8402-80153cd7369b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:15:33 crc kubenswrapper[4869]: I1001 15:15:33.669508 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3635d59c-a52d-465d-8402-80153cd7369b-kube-api-access-rc5nb" (OuterVolumeSpecName: "kube-api-access-rc5nb") pod "3635d59c-a52d-465d-8402-80153cd7369b" (UID: "3635d59c-a52d-465d-8402-80153cd7369b"). InnerVolumeSpecName "kube-api-access-rc5nb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:15:33 crc kubenswrapper[4869]: I1001 15:15:33.767634 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rc5nb\" (UniqueName: \"kubernetes.io/projected/3635d59c-a52d-465d-8402-80153cd7369b-kube-api-access-rc5nb\") on node \"crc\" DevicePath \"\"" Oct 01 15:15:33 crc kubenswrapper[4869]: I1001 15:15:33.767685 4869 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3635d59c-a52d-465d-8402-80153cd7369b-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 01 15:15:33 crc kubenswrapper[4869]: I1001 15:15:33.767705 4869 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3635d59c-a52d-465d-8402-80153cd7369b-config\") on node \"crc\" DevicePath \"\"" Oct 01 15:15:34 crc kubenswrapper[4869]: I1001 15:15:34.296659 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-66db67bc96-wm56j"] Oct 01 15:15:34 crc kubenswrapper[4869]: E1001 15:15:34.297623 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8367e54f-3b20-4903-8dcf-d3ae02e516ca" containerName="controller-manager" Oct 01 15:15:34 crc kubenswrapper[4869]: I1001 15:15:34.297646 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="8367e54f-3b20-4903-8dcf-d3ae02e516ca" containerName="controller-manager" Oct 01 15:15:34 crc kubenswrapper[4869]: E1001 15:15:34.297668 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3635d59c-a52d-465d-8402-80153cd7369b" containerName="route-controller-manager" Oct 01 15:15:34 crc kubenswrapper[4869]: I1001 15:15:34.297683 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="3635d59c-a52d-465d-8402-80153cd7369b" containerName="route-controller-manager" Oct 01 15:15:34 crc kubenswrapper[4869]: E1001 15:15:34.297721 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b44ff70-e201-4423-bf46-ec7066786edc" containerName="collect-profiles" Oct 01 15:15:34 crc kubenswrapper[4869]: I1001 15:15:34.297734 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b44ff70-e201-4423-bf46-ec7066786edc" containerName="collect-profiles" Oct 01 15:15:34 crc kubenswrapper[4869]: I1001 15:15:34.297979 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="7b44ff70-e201-4423-bf46-ec7066786edc" containerName="collect-profiles" Oct 01 15:15:34 crc kubenswrapper[4869]: I1001 15:15:34.298000 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="3635d59c-a52d-465d-8402-80153cd7369b" containerName="route-controller-manager" Oct 01 15:15:34 crc kubenswrapper[4869]: I1001 15:15:34.298020 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="8367e54f-3b20-4903-8dcf-d3ae02e516ca" containerName="controller-manager" Oct 01 15:15:34 crc kubenswrapper[4869]: I1001 15:15:34.298776 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-66db67bc96-wm56j" Oct 01 15:15:34 crc kubenswrapper[4869]: I1001 15:15:34.305568 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5867954f74-ndnn8"] Oct 01 15:15:34 crc kubenswrapper[4869]: I1001 15:15:34.307206 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5867954f74-ndnn8" Oct 01 15:15:34 crc kubenswrapper[4869]: I1001 15:15:34.309352 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-66db67bc96-wm56j"] Oct 01 15:15:34 crc kubenswrapper[4869]: I1001 15:15:34.345643 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5867954f74-ndnn8"] Oct 01 15:15:34 crc kubenswrapper[4869]: I1001 15:15:34.376102 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/632b05cf-42ab-4cbe-a17e-a0aa822e832b-proxy-ca-bundles\") pod \"controller-manager-66db67bc96-wm56j\" (UID: \"632b05cf-42ab-4cbe-a17e-a0aa822e832b\") " pod="openshift-controller-manager/controller-manager-66db67bc96-wm56j" Oct 01 15:15:34 crc kubenswrapper[4869]: I1001 15:15:34.376156 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4szr7\" (UniqueName: \"kubernetes.io/projected/632b05cf-42ab-4cbe-a17e-a0aa822e832b-kube-api-access-4szr7\") pod \"controller-manager-66db67bc96-wm56j\" (UID: \"632b05cf-42ab-4cbe-a17e-a0aa822e832b\") " pod="openshift-controller-manager/controller-manager-66db67bc96-wm56j" Oct 01 15:15:34 crc kubenswrapper[4869]: I1001 15:15:34.376205 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/632b05cf-42ab-4cbe-a17e-a0aa822e832b-client-ca\") pod \"controller-manager-66db67bc96-wm56j\" (UID: \"632b05cf-42ab-4cbe-a17e-a0aa822e832b\") " pod="openshift-controller-manager/controller-manager-66db67bc96-wm56j" Oct 01 15:15:34 crc kubenswrapper[4869]: I1001 15:15:34.376226 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/632b05cf-42ab-4cbe-a17e-a0aa822e832b-serving-cert\") pod \"controller-manager-66db67bc96-wm56j\" (UID: \"632b05cf-42ab-4cbe-a17e-a0aa822e832b\") " pod="openshift-controller-manager/controller-manager-66db67bc96-wm56j" Oct 01 15:15:34 crc kubenswrapper[4869]: I1001 15:15:34.376276 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/632b05cf-42ab-4cbe-a17e-a0aa822e832b-config\") pod \"controller-manager-66db67bc96-wm56j\" (UID: \"632b05cf-42ab-4cbe-a17e-a0aa822e832b\") " pod="openshift-controller-manager/controller-manager-66db67bc96-wm56j" Oct 01 15:15:34 crc kubenswrapper[4869]: I1001 15:15:34.413070 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5zhxq" event={"ID":"3635d59c-a52d-465d-8402-80153cd7369b","Type":"ContainerDied","Data":"bb40840dc360e6b13928e139ca1cf6a8e3205b063ad217173aa662a7f34a1e4f"} Oct 01 15:15:34 crc kubenswrapper[4869]: I1001 15:15:34.413428 4869 scope.go:117] "RemoveContainer" containerID="940ffe1670ca371e81078c19ecec12f7298b1df088ba46d33bb5c4b46427acd5" Oct 01 15:15:34 crc kubenswrapper[4869]: I1001 15:15:34.413130 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5zhxq" Oct 01 15:15:34 crc kubenswrapper[4869]: I1001 15:15:34.415383 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-s8l4m" event={"ID":"8367e54f-3b20-4903-8dcf-d3ae02e516ca","Type":"ContainerDied","Data":"3c69d7e8347bffdee5fa5d619939f5a7774e1c535edc50326919f3567698681b"} Oct 01 15:15:34 crc kubenswrapper[4869]: I1001 15:15:34.415534 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-s8l4m" Oct 01 15:15:34 crc kubenswrapper[4869]: I1001 15:15:34.445067 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-s8l4m"] Oct 01 15:15:34 crc kubenswrapper[4869]: I1001 15:15:34.452496 4869 scope.go:117] "RemoveContainer" containerID="edf3d3805b9c8aa57b9b4995817df24bc06cefbe5fda9f79542d1715fb7612fb" Oct 01 15:15:34 crc kubenswrapper[4869]: I1001 15:15:34.466183 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-s8l4m"] Oct 01 15:15:34 crc kubenswrapper[4869]: I1001 15:15:34.483504 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-5zhxq"] Oct 01 15:15:34 crc kubenswrapper[4869]: I1001 15:15:34.483745 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/632b05cf-42ab-4cbe-a17e-a0aa822e832b-proxy-ca-bundles\") pod \"controller-manager-66db67bc96-wm56j\" (UID: \"632b05cf-42ab-4cbe-a17e-a0aa822e832b\") " pod="openshift-controller-manager/controller-manager-66db67bc96-wm56j" Oct 01 15:15:34 crc kubenswrapper[4869]: I1001 15:15:34.483886 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b8e54269-4e13-43e0-8f7a-9a336b0dea27-config\") pod \"route-controller-manager-5867954f74-ndnn8\" (UID: \"b8e54269-4e13-43e0-8f7a-9a336b0dea27\") " pod="openshift-route-controller-manager/route-controller-manager-5867954f74-ndnn8" Oct 01 15:15:34 crc kubenswrapper[4869]: I1001 15:15:34.483982 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4szr7\" (UniqueName: \"kubernetes.io/projected/632b05cf-42ab-4cbe-a17e-a0aa822e832b-kube-api-access-4szr7\") pod \"controller-manager-66db67bc96-wm56j\" (UID: \"632b05cf-42ab-4cbe-a17e-a0aa822e832b\") " pod="openshift-controller-manager/controller-manager-66db67bc96-wm56j" Oct 01 15:15:34 crc kubenswrapper[4869]: I1001 15:15:34.484085 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/632b05cf-42ab-4cbe-a17e-a0aa822e832b-client-ca\") pod \"controller-manager-66db67bc96-wm56j\" (UID: \"632b05cf-42ab-4cbe-a17e-a0aa822e832b\") " pod="openshift-controller-manager/controller-manager-66db67bc96-wm56j" Oct 01 15:15:34 crc kubenswrapper[4869]: I1001 15:15:34.484125 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/632b05cf-42ab-4cbe-a17e-a0aa822e832b-serving-cert\") pod \"controller-manager-66db67bc96-wm56j\" (UID: \"632b05cf-42ab-4cbe-a17e-a0aa822e832b\") " pod="openshift-controller-manager/controller-manager-66db67bc96-wm56j" Oct 
01 15:15:34 crc kubenswrapper[4869]: I1001 15:15:34.484199 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/632b05cf-42ab-4cbe-a17e-a0aa822e832b-config\") pod \"controller-manager-66db67bc96-wm56j\" (UID: \"632b05cf-42ab-4cbe-a17e-a0aa822e832b\") " pod="openshift-controller-manager/controller-manager-66db67bc96-wm56j" Oct 01 15:15:34 crc kubenswrapper[4869]: I1001 15:15:34.484446 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b8e54269-4e13-43e0-8f7a-9a336b0dea27-serving-cert\") pod \"route-controller-manager-5867954f74-ndnn8\" (UID: \"b8e54269-4e13-43e0-8f7a-9a336b0dea27\") " pod="openshift-route-controller-manager/route-controller-manager-5867954f74-ndnn8" Oct 01 15:15:34 crc kubenswrapper[4869]: I1001 15:15:34.484511 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b8e54269-4e13-43e0-8f7a-9a336b0dea27-client-ca\") pod \"route-controller-manager-5867954f74-ndnn8\" (UID: \"b8e54269-4e13-43e0-8f7a-9a336b0dea27\") " pod="openshift-route-controller-manager/route-controller-manager-5867954f74-ndnn8" Oct 01 15:15:34 crc kubenswrapper[4869]: I1001 15:15:34.484560 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9pcd2\" (UniqueName: \"kubernetes.io/projected/b8e54269-4e13-43e0-8f7a-9a336b0dea27-kube-api-access-9pcd2\") pod \"route-controller-manager-5867954f74-ndnn8\" (UID: \"b8e54269-4e13-43e0-8f7a-9a336b0dea27\") " pod="openshift-route-controller-manager/route-controller-manager-5867954f74-ndnn8" Oct 01 15:15:34 crc kubenswrapper[4869]: I1001 15:15:34.484739 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/632b05cf-42ab-4cbe-a17e-a0aa822e832b-proxy-ca-bundles\") pod \"controller-manager-66db67bc96-wm56j\" (UID: \"632b05cf-42ab-4cbe-a17e-a0aa822e832b\") " pod="openshift-controller-manager/controller-manager-66db67bc96-wm56j" Oct 01 15:15:34 crc kubenswrapper[4869]: I1001 15:15:34.485686 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/632b05cf-42ab-4cbe-a17e-a0aa822e832b-config\") pod \"controller-manager-66db67bc96-wm56j\" (UID: \"632b05cf-42ab-4cbe-a17e-a0aa822e832b\") " pod="openshift-controller-manager/controller-manager-66db67bc96-wm56j" Oct 01 15:15:34 crc kubenswrapper[4869]: I1001 15:15:34.486121 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/632b05cf-42ab-4cbe-a17e-a0aa822e832b-client-ca\") pod \"controller-manager-66db67bc96-wm56j\" (UID: \"632b05cf-42ab-4cbe-a17e-a0aa822e832b\") " pod="openshift-controller-manager/controller-manager-66db67bc96-wm56j" Oct 01 15:15:34 crc kubenswrapper[4869]: I1001 15:15:34.502195 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-5zhxq"] Oct 01 15:15:34 crc kubenswrapper[4869]: I1001 15:15:34.510133 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4szr7\" (UniqueName: \"kubernetes.io/projected/632b05cf-42ab-4cbe-a17e-a0aa822e832b-kube-api-access-4szr7\") pod \"controller-manager-66db67bc96-wm56j\" (UID: \"632b05cf-42ab-4cbe-a17e-a0aa822e832b\") " 
pod="openshift-controller-manager/controller-manager-66db67bc96-wm56j" Oct 01 15:15:34 crc kubenswrapper[4869]: I1001 15:15:34.510653 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/632b05cf-42ab-4cbe-a17e-a0aa822e832b-serving-cert\") pod \"controller-manager-66db67bc96-wm56j\" (UID: \"632b05cf-42ab-4cbe-a17e-a0aa822e832b\") " pod="openshift-controller-manager/controller-manager-66db67bc96-wm56j" Oct 01 15:15:34 crc kubenswrapper[4869]: I1001 15:15:34.586233 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b8e54269-4e13-43e0-8f7a-9a336b0dea27-serving-cert\") pod \"route-controller-manager-5867954f74-ndnn8\" (UID: \"b8e54269-4e13-43e0-8f7a-9a336b0dea27\") " pod="openshift-route-controller-manager/route-controller-manager-5867954f74-ndnn8" Oct 01 15:15:34 crc kubenswrapper[4869]: I1001 15:15:34.586303 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b8e54269-4e13-43e0-8f7a-9a336b0dea27-client-ca\") pod \"route-controller-manager-5867954f74-ndnn8\" (UID: \"b8e54269-4e13-43e0-8f7a-9a336b0dea27\") " pod="openshift-route-controller-manager/route-controller-manager-5867954f74-ndnn8" Oct 01 15:15:34 crc kubenswrapper[4869]: I1001 15:15:34.586334 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9pcd2\" (UniqueName: \"kubernetes.io/projected/b8e54269-4e13-43e0-8f7a-9a336b0dea27-kube-api-access-9pcd2\") pod \"route-controller-manager-5867954f74-ndnn8\" (UID: \"b8e54269-4e13-43e0-8f7a-9a336b0dea27\") " pod="openshift-route-controller-manager/route-controller-manager-5867954f74-ndnn8" Oct 01 15:15:34 crc kubenswrapper[4869]: I1001 15:15:34.586375 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b8e54269-4e13-43e0-8f7a-9a336b0dea27-config\") pod \"route-controller-manager-5867954f74-ndnn8\" (UID: \"b8e54269-4e13-43e0-8f7a-9a336b0dea27\") " pod="openshift-route-controller-manager/route-controller-manager-5867954f74-ndnn8" Oct 01 15:15:34 crc kubenswrapper[4869]: I1001 15:15:34.587683 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b8e54269-4e13-43e0-8f7a-9a336b0dea27-config\") pod \"route-controller-manager-5867954f74-ndnn8\" (UID: \"b8e54269-4e13-43e0-8f7a-9a336b0dea27\") " pod="openshift-route-controller-manager/route-controller-manager-5867954f74-ndnn8" Oct 01 15:15:34 crc kubenswrapper[4869]: I1001 15:15:34.588087 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b8e54269-4e13-43e0-8f7a-9a336b0dea27-client-ca\") pod \"route-controller-manager-5867954f74-ndnn8\" (UID: \"b8e54269-4e13-43e0-8f7a-9a336b0dea27\") " pod="openshift-route-controller-manager/route-controller-manager-5867954f74-ndnn8" Oct 01 15:15:34 crc kubenswrapper[4869]: I1001 15:15:34.589400 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b8e54269-4e13-43e0-8f7a-9a336b0dea27-serving-cert\") pod \"route-controller-manager-5867954f74-ndnn8\" (UID: \"b8e54269-4e13-43e0-8f7a-9a336b0dea27\") " pod="openshift-route-controller-manager/route-controller-manager-5867954f74-ndnn8" Oct 01 15:15:34 crc kubenswrapper[4869]: I1001 15:15:34.607000 4869 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9pcd2\" (UniqueName: \"kubernetes.io/projected/b8e54269-4e13-43e0-8f7a-9a336b0dea27-kube-api-access-9pcd2\") pod \"route-controller-manager-5867954f74-ndnn8\" (UID: \"b8e54269-4e13-43e0-8f7a-9a336b0dea27\") " pod="openshift-route-controller-manager/route-controller-manager-5867954f74-ndnn8" Oct 01 15:15:34 crc kubenswrapper[4869]: I1001 15:15:34.656992 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-66db67bc96-wm56j" Oct 01 15:15:34 crc kubenswrapper[4869]: I1001 15:15:34.663202 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5867954f74-ndnn8" Oct 01 15:15:34 crc kubenswrapper[4869]: I1001 15:15:34.848817 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5867954f74-ndnn8"] Oct 01 15:15:34 crc kubenswrapper[4869]: I1001 15:15:34.880339 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5867954f74-ndnn8"] Oct 01 15:15:34 crc kubenswrapper[4869]: I1001 15:15:34.895326 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-66db67bc96-wm56j"] Oct 01 15:15:35 crc kubenswrapper[4869]: I1001 15:15:35.421651 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5867954f74-ndnn8" event={"ID":"b8e54269-4e13-43e0-8f7a-9a336b0dea27","Type":"ContainerStarted","Data":"efcbe74f0f84b5f2a50b449608d97e230fae73893ab9872b2f90a39d5a4cdd1a"} Oct 01 15:15:35 crc kubenswrapper[4869]: I1001 15:15:35.422235 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5867954f74-ndnn8" event={"ID":"b8e54269-4e13-43e0-8f7a-9a336b0dea27","Type":"ContainerStarted","Data":"173996647f4209c12a66145a695ae2b55e8217c10bad1dec4d8b64f3a564fd6d"} Oct 01 15:15:35 crc kubenswrapper[4869]: I1001 15:15:35.422252 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-5867954f74-ndnn8" Oct 01 15:15:35 crc kubenswrapper[4869]: I1001 15:15:35.421740 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-5867954f74-ndnn8" podUID="b8e54269-4e13-43e0-8f7a-9a336b0dea27" containerName="route-controller-manager" containerID="cri-o://efcbe74f0f84b5f2a50b449608d97e230fae73893ab9872b2f90a39d5a4cdd1a" gracePeriod=30 Oct 01 15:15:35 crc kubenswrapper[4869]: I1001 15:15:35.423968 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-66db67bc96-wm56j" event={"ID":"632b05cf-42ab-4cbe-a17e-a0aa822e832b","Type":"ContainerStarted","Data":"3f83bf2587de3cf88abdf3035501de92f33bab5b8b69e3b6f1721e51b9feb120"} Oct 01 15:15:35 crc kubenswrapper[4869]: I1001 15:15:35.424011 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-66db67bc96-wm56j" event={"ID":"632b05cf-42ab-4cbe-a17e-a0aa822e832b","Type":"ContainerStarted","Data":"5a452f8f05cb7ad878593532737d17f8d1335c3a1de649996fbb84d153850466"} Oct 01 15:15:35 crc kubenswrapper[4869]: I1001 15:15:35.424193 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-controller-manager/controller-manager-66db67bc96-wm56j" Oct 01 15:15:35 crc kubenswrapper[4869]: I1001 15:15:35.429064 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-66db67bc96-wm56j" Oct 01 15:15:35 crc kubenswrapper[4869]: I1001 15:15:35.451497 4869 patch_prober.go:28] interesting pod/route-controller-manager-5867954f74-ndnn8 container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.51:8443/healthz\": read tcp 10.217.0.2:40978->10.217.0.51:8443: read: connection reset by peer" start-of-body= Oct 01 15:15:35 crc kubenswrapper[4869]: I1001 15:15:35.451549 4869 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-5867954f74-ndnn8" podUID="b8e54269-4e13-43e0-8f7a-9a336b0dea27" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.51:8443/healthz\": read tcp 10.217.0.2:40978->10.217.0.51:8443: read: connection reset by peer" Oct 01 15:15:35 crc kubenswrapper[4869]: I1001 15:15:35.479919 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-5867954f74-ndnn8" podStartSLOduration=2.479902408 podStartE2EDuration="2.479902408s" podCreationTimestamp="2025-10-01 15:15:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:15:35.461919325 +0000 UTC m=+644.608762451" watchObservedRunningTime="2025-10-01 15:15:35.479902408 +0000 UTC m=+644.626745524" Oct 01 15:15:35 crc kubenswrapper[4869]: I1001 15:15:35.586659 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3635d59c-a52d-465d-8402-80153cd7369b" path="/var/lib/kubelet/pods/3635d59c-a52d-465d-8402-80153cd7369b/volumes" Oct 01 15:15:35 crc kubenswrapper[4869]: I1001 15:15:35.587368 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8367e54f-3b20-4903-8dcf-d3ae02e516ca" path="/var/lib/kubelet/pods/8367e54f-3b20-4903-8dcf-d3ae02e516ca/volumes" Oct 01 15:15:35 crc kubenswrapper[4869]: I1001 15:15:35.748047 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-route-controller-manager_route-controller-manager-5867954f74-ndnn8_b8e54269-4e13-43e0-8f7a-9a336b0dea27/route-controller-manager/0.log" Oct 01 15:15:35 crc kubenswrapper[4869]: I1001 15:15:35.748117 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5867954f74-ndnn8" Oct 01 15:15:35 crc kubenswrapper[4869]: I1001 15:15:35.767467 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-66db67bc96-wm56j" podStartSLOduration=2.767444522 podStartE2EDuration="2.767444522s" podCreationTimestamp="2025-10-01 15:15:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:15:35.482169485 +0000 UTC m=+644.629012601" watchObservedRunningTime="2025-10-01 15:15:35.767444522 +0000 UTC m=+644.914287648" Oct 01 15:15:35 crc kubenswrapper[4869]: I1001 15:15:35.913738 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b8e54269-4e13-43e0-8f7a-9a336b0dea27-serving-cert\") pod \"b8e54269-4e13-43e0-8f7a-9a336b0dea27\" (UID: \"b8e54269-4e13-43e0-8f7a-9a336b0dea27\") " Oct 01 15:15:35 crc kubenswrapper[4869]: I1001 15:15:35.913801 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9pcd2\" (UniqueName: \"kubernetes.io/projected/b8e54269-4e13-43e0-8f7a-9a336b0dea27-kube-api-access-9pcd2\") pod \"b8e54269-4e13-43e0-8f7a-9a336b0dea27\" (UID: \"b8e54269-4e13-43e0-8f7a-9a336b0dea27\") " Oct 01 15:15:35 crc kubenswrapper[4869]: I1001 15:15:35.913880 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b8e54269-4e13-43e0-8f7a-9a336b0dea27-config\") pod \"b8e54269-4e13-43e0-8f7a-9a336b0dea27\" (UID: \"b8e54269-4e13-43e0-8f7a-9a336b0dea27\") " Oct 01 15:15:35 crc kubenswrapper[4869]: I1001 15:15:35.913917 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b8e54269-4e13-43e0-8f7a-9a336b0dea27-client-ca\") pod \"b8e54269-4e13-43e0-8f7a-9a336b0dea27\" (UID: \"b8e54269-4e13-43e0-8f7a-9a336b0dea27\") " Oct 01 15:15:35 crc kubenswrapper[4869]: I1001 15:15:35.914857 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b8e54269-4e13-43e0-8f7a-9a336b0dea27-client-ca" (OuterVolumeSpecName: "client-ca") pod "b8e54269-4e13-43e0-8f7a-9a336b0dea27" (UID: "b8e54269-4e13-43e0-8f7a-9a336b0dea27"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:15:35 crc kubenswrapper[4869]: I1001 15:15:35.915898 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b8e54269-4e13-43e0-8f7a-9a336b0dea27-config" (OuterVolumeSpecName: "config") pod "b8e54269-4e13-43e0-8f7a-9a336b0dea27" (UID: "b8e54269-4e13-43e0-8f7a-9a336b0dea27"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:15:35 crc kubenswrapper[4869]: I1001 15:15:35.922995 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b8e54269-4e13-43e0-8f7a-9a336b0dea27-kube-api-access-9pcd2" (OuterVolumeSpecName: "kube-api-access-9pcd2") pod "b8e54269-4e13-43e0-8f7a-9a336b0dea27" (UID: "b8e54269-4e13-43e0-8f7a-9a336b0dea27"). InnerVolumeSpecName "kube-api-access-9pcd2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:15:35 crc kubenswrapper[4869]: I1001 15:15:35.923159 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b8e54269-4e13-43e0-8f7a-9a336b0dea27-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "b8e54269-4e13-43e0-8f7a-9a336b0dea27" (UID: "b8e54269-4e13-43e0-8f7a-9a336b0dea27"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:15:36 crc kubenswrapper[4869]: I1001 15:15:36.015340 4869 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b8e54269-4e13-43e0-8f7a-9a336b0dea27-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 01 15:15:36 crc kubenswrapper[4869]: I1001 15:15:36.015378 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9pcd2\" (UniqueName: \"kubernetes.io/projected/b8e54269-4e13-43e0-8f7a-9a336b0dea27-kube-api-access-9pcd2\") on node \"crc\" DevicePath \"\"" Oct 01 15:15:36 crc kubenswrapper[4869]: I1001 15:15:36.015391 4869 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b8e54269-4e13-43e0-8f7a-9a336b0dea27-config\") on node \"crc\" DevicePath \"\"" Oct 01 15:15:36 crc kubenswrapper[4869]: I1001 15:15:36.015401 4869 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b8e54269-4e13-43e0-8f7a-9a336b0dea27-client-ca\") on node \"crc\" DevicePath \"\"" Oct 01 15:15:36 crc kubenswrapper[4869]: I1001 15:15:36.294248 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-dc967cccc-rxmdz"] Oct 01 15:15:36 crc kubenswrapper[4869]: E1001 15:15:36.294525 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8e54269-4e13-43e0-8f7a-9a336b0dea27" containerName="route-controller-manager" Oct 01 15:15:36 crc kubenswrapper[4869]: I1001 15:15:36.294542 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8e54269-4e13-43e0-8f7a-9a336b0dea27" containerName="route-controller-manager" Oct 01 15:15:36 crc kubenswrapper[4869]: I1001 15:15:36.294718 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="b8e54269-4e13-43e0-8f7a-9a336b0dea27" containerName="route-controller-manager" Oct 01 15:15:36 crc kubenswrapper[4869]: I1001 15:15:36.295172 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-dc967cccc-rxmdz" Oct 01 15:15:36 crc kubenswrapper[4869]: I1001 15:15:36.331849 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-dc967cccc-rxmdz"] Oct 01 15:15:36 crc kubenswrapper[4869]: I1001 15:15:36.420447 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e3e19fbf-86ec-4608-8f8f-b8d25c524566-serving-cert\") pod \"route-controller-manager-dc967cccc-rxmdz\" (UID: \"e3e19fbf-86ec-4608-8f8f-b8d25c524566\") " pod="openshift-route-controller-manager/route-controller-manager-dc967cccc-rxmdz" Oct 01 15:15:36 crc kubenswrapper[4869]: I1001 15:15:36.420615 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e3e19fbf-86ec-4608-8f8f-b8d25c524566-config\") pod \"route-controller-manager-dc967cccc-rxmdz\" (UID: \"e3e19fbf-86ec-4608-8f8f-b8d25c524566\") " pod="openshift-route-controller-manager/route-controller-manager-dc967cccc-rxmdz" Oct 01 15:15:36 crc kubenswrapper[4869]: I1001 15:15:36.420700 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z9zn2\" (UniqueName: \"kubernetes.io/projected/e3e19fbf-86ec-4608-8f8f-b8d25c524566-kube-api-access-z9zn2\") pod \"route-controller-manager-dc967cccc-rxmdz\" (UID: \"e3e19fbf-86ec-4608-8f8f-b8d25c524566\") " pod="openshift-route-controller-manager/route-controller-manager-dc967cccc-rxmdz" Oct 01 15:15:36 crc kubenswrapper[4869]: I1001 15:15:36.420773 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e3e19fbf-86ec-4608-8f8f-b8d25c524566-client-ca\") pod \"route-controller-manager-dc967cccc-rxmdz\" (UID: \"e3e19fbf-86ec-4608-8f8f-b8d25c524566\") " pod="openshift-route-controller-manager/route-controller-manager-dc967cccc-rxmdz" Oct 01 15:15:36 crc kubenswrapper[4869]: I1001 15:15:36.440331 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-route-controller-manager_route-controller-manager-5867954f74-ndnn8_b8e54269-4e13-43e0-8f7a-9a336b0dea27/route-controller-manager/0.log" Oct 01 15:15:36 crc kubenswrapper[4869]: I1001 15:15:36.440419 4869 generic.go:334] "Generic (PLEG): container finished" podID="b8e54269-4e13-43e0-8f7a-9a336b0dea27" containerID="efcbe74f0f84b5f2a50b449608d97e230fae73893ab9872b2f90a39d5a4cdd1a" exitCode=255 Oct 01 15:15:36 crc kubenswrapper[4869]: I1001 15:15:36.440520 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5867954f74-ndnn8" event={"ID":"b8e54269-4e13-43e0-8f7a-9a336b0dea27","Type":"ContainerDied","Data":"efcbe74f0f84b5f2a50b449608d97e230fae73893ab9872b2f90a39d5a4cdd1a"} Oct 01 15:15:36 crc kubenswrapper[4869]: I1001 15:15:36.440590 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5867954f74-ndnn8" event={"ID":"b8e54269-4e13-43e0-8f7a-9a336b0dea27","Type":"ContainerDied","Data":"173996647f4209c12a66145a695ae2b55e8217c10bad1dec4d8b64f3a564fd6d"} Oct 01 15:15:36 crc kubenswrapper[4869]: I1001 15:15:36.440609 4869 scope.go:117] "RemoveContainer" containerID="efcbe74f0f84b5f2a50b449608d97e230fae73893ab9872b2f90a39d5a4cdd1a" Oct 01 15:15:36 crc 
kubenswrapper[4869]: I1001 15:15:36.440541 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5867954f74-ndnn8" Oct 01 15:15:36 crc kubenswrapper[4869]: I1001 15:15:36.470654 4869 scope.go:117] "RemoveContainer" containerID="efcbe74f0f84b5f2a50b449608d97e230fae73893ab9872b2f90a39d5a4cdd1a" Oct 01 15:15:36 crc kubenswrapper[4869]: E1001 15:15:36.471127 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"efcbe74f0f84b5f2a50b449608d97e230fae73893ab9872b2f90a39d5a4cdd1a\": container with ID starting with efcbe74f0f84b5f2a50b449608d97e230fae73893ab9872b2f90a39d5a4cdd1a not found: ID does not exist" containerID="efcbe74f0f84b5f2a50b449608d97e230fae73893ab9872b2f90a39d5a4cdd1a" Oct 01 15:15:36 crc kubenswrapper[4869]: I1001 15:15:36.471178 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"efcbe74f0f84b5f2a50b449608d97e230fae73893ab9872b2f90a39d5a4cdd1a"} err="failed to get container status \"efcbe74f0f84b5f2a50b449608d97e230fae73893ab9872b2f90a39d5a4cdd1a\": rpc error: code = NotFound desc = could not find container \"efcbe74f0f84b5f2a50b449608d97e230fae73893ab9872b2f90a39d5a4cdd1a\": container with ID starting with efcbe74f0f84b5f2a50b449608d97e230fae73893ab9872b2f90a39d5a4cdd1a not found: ID does not exist" Oct 01 15:15:36 crc kubenswrapper[4869]: I1001 15:15:36.472966 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5867954f74-ndnn8"] Oct 01 15:15:36 crc kubenswrapper[4869]: I1001 15:15:36.476250 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5867954f74-ndnn8"] Oct 01 15:15:36 crc kubenswrapper[4869]: I1001 15:15:36.522381 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e3e19fbf-86ec-4608-8f8f-b8d25c524566-serving-cert\") pod \"route-controller-manager-dc967cccc-rxmdz\" (UID: \"e3e19fbf-86ec-4608-8f8f-b8d25c524566\") " pod="openshift-route-controller-manager/route-controller-manager-dc967cccc-rxmdz" Oct 01 15:15:36 crc kubenswrapper[4869]: I1001 15:15:36.522449 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e3e19fbf-86ec-4608-8f8f-b8d25c524566-config\") pod \"route-controller-manager-dc967cccc-rxmdz\" (UID: \"e3e19fbf-86ec-4608-8f8f-b8d25c524566\") " pod="openshift-route-controller-manager/route-controller-manager-dc967cccc-rxmdz" Oct 01 15:15:36 crc kubenswrapper[4869]: I1001 15:15:36.522496 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z9zn2\" (UniqueName: \"kubernetes.io/projected/e3e19fbf-86ec-4608-8f8f-b8d25c524566-kube-api-access-z9zn2\") pod \"route-controller-manager-dc967cccc-rxmdz\" (UID: \"e3e19fbf-86ec-4608-8f8f-b8d25c524566\") " pod="openshift-route-controller-manager/route-controller-manager-dc967cccc-rxmdz" Oct 01 15:15:36 crc kubenswrapper[4869]: I1001 15:15:36.522590 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e3e19fbf-86ec-4608-8f8f-b8d25c524566-client-ca\") pod \"route-controller-manager-dc967cccc-rxmdz\" (UID: \"e3e19fbf-86ec-4608-8f8f-b8d25c524566\") " 
pod="openshift-route-controller-manager/route-controller-manager-dc967cccc-rxmdz" Oct 01 15:15:36 crc kubenswrapper[4869]: I1001 15:15:36.524384 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e3e19fbf-86ec-4608-8f8f-b8d25c524566-client-ca\") pod \"route-controller-manager-dc967cccc-rxmdz\" (UID: \"e3e19fbf-86ec-4608-8f8f-b8d25c524566\") " pod="openshift-route-controller-manager/route-controller-manager-dc967cccc-rxmdz" Oct 01 15:15:36 crc kubenswrapper[4869]: I1001 15:15:36.526366 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e3e19fbf-86ec-4608-8f8f-b8d25c524566-config\") pod \"route-controller-manager-dc967cccc-rxmdz\" (UID: \"e3e19fbf-86ec-4608-8f8f-b8d25c524566\") " pod="openshift-route-controller-manager/route-controller-manager-dc967cccc-rxmdz" Oct 01 15:15:36 crc kubenswrapper[4869]: I1001 15:15:36.528567 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e3e19fbf-86ec-4608-8f8f-b8d25c524566-serving-cert\") pod \"route-controller-manager-dc967cccc-rxmdz\" (UID: \"e3e19fbf-86ec-4608-8f8f-b8d25c524566\") " pod="openshift-route-controller-manager/route-controller-manager-dc967cccc-rxmdz" Oct 01 15:15:36 crc kubenswrapper[4869]: I1001 15:15:36.550925 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z9zn2\" (UniqueName: \"kubernetes.io/projected/e3e19fbf-86ec-4608-8f8f-b8d25c524566-kube-api-access-z9zn2\") pod \"route-controller-manager-dc967cccc-rxmdz\" (UID: \"e3e19fbf-86ec-4608-8f8f-b8d25c524566\") " pod="openshift-route-controller-manager/route-controller-manager-dc967cccc-rxmdz" Oct 01 15:15:36 crc kubenswrapper[4869]: I1001 15:15:36.615429 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-dc967cccc-rxmdz" Oct 01 15:15:36 crc kubenswrapper[4869]: I1001 15:15:36.938933 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-dc967cccc-rxmdz"] Oct 01 15:15:36 crc kubenswrapper[4869]: W1001 15:15:36.945343 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode3e19fbf_86ec_4608_8f8f_b8d25c524566.slice/crio-0f8ec3c933559ea9896874d62d12873e0c454a30978089d85c0249609b3c9a42 WatchSource:0}: Error finding container 0f8ec3c933559ea9896874d62d12873e0c454a30978089d85c0249609b3c9a42: Status 404 returned error can't find the container with id 0f8ec3c933559ea9896874d62d12873e0c454a30978089d85c0249609b3c9a42 Oct 01 15:15:37 crc kubenswrapper[4869]: I1001 15:15:37.450387 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-dc967cccc-rxmdz" event={"ID":"e3e19fbf-86ec-4608-8f8f-b8d25c524566","Type":"ContainerStarted","Data":"dd31720dcdcaa5fab016b3d707e190416de14eed414524616b6f5324ad23230f"} Oct 01 15:15:37 crc kubenswrapper[4869]: I1001 15:15:37.450458 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-dc967cccc-rxmdz" event={"ID":"e3e19fbf-86ec-4608-8f8f-b8d25c524566","Type":"ContainerStarted","Data":"0f8ec3c933559ea9896874d62d12873e0c454a30978089d85c0249609b3c9a42"} Oct 01 15:15:37 crc kubenswrapper[4869]: I1001 15:15:37.480106 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-dc967cccc-rxmdz" podStartSLOduration=3.480078481 podStartE2EDuration="3.480078481s" podCreationTimestamp="2025-10-01 15:15:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:15:37.474108971 +0000 UTC m=+646.620952117" watchObservedRunningTime="2025-10-01 15:15:37.480078481 +0000 UTC m=+646.626921637" Oct 01 15:15:37 crc kubenswrapper[4869]: I1001 15:15:37.591669 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b8e54269-4e13-43e0-8f7a-9a336b0dea27" path="/var/lib/kubelet/pods/b8e54269-4e13-43e0-8f7a-9a336b0dea27/volumes" Oct 01 15:15:38 crc kubenswrapper[4869]: I1001 15:15:38.462175 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-dc967cccc-rxmdz" Oct 01 15:15:38 crc kubenswrapper[4869]: I1001 15:15:38.468289 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-dc967cccc-rxmdz" Oct 01 15:15:39 crc kubenswrapper[4869]: I1001 15:15:39.860665 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-56886c7897-6kmth" Oct 01 15:15:40 crc kubenswrapper[4869]: I1001 15:15:40.826202 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-vrp9g"] Oct 01 15:15:40 crc kubenswrapper[4869]: I1001 15:15:40.828272 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-vrp9g" Oct 01 15:15:40 crc kubenswrapper[4869]: I1001 15:15:40.830945 4869 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-lzxnj" Oct 01 15:15:40 crc kubenswrapper[4869]: I1001 15:15:40.831095 4869 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Oct 01 15:15:40 crc kubenswrapper[4869]: I1001 15:15:40.831184 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Oct 01 15:15:40 crc kubenswrapper[4869]: I1001 15:15:40.838744 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-5478bdb765-t9fnk"] Oct 01 15:15:40 crc kubenswrapper[4869]: I1001 15:15:40.845490 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-t9fnk" Oct 01 15:15:40 crc kubenswrapper[4869]: I1001 15:15:40.851287 4869 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Oct 01 15:15:40 crc kubenswrapper[4869]: I1001 15:15:40.857693 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-5478bdb765-t9fnk"] Oct 01 15:15:40 crc kubenswrapper[4869]: I1001 15:15:40.889891 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/38150466-9f4a-4ae1-a5b5-bfca202b829f-metrics\") pod \"frr-k8s-vrp9g\" (UID: \"38150466-9f4a-4ae1-a5b5-bfca202b829f\") " pod="metallb-system/frr-k8s-vrp9g" Oct 01 15:15:40 crc kubenswrapper[4869]: I1001 15:15:40.889934 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/38150466-9f4a-4ae1-a5b5-bfca202b829f-frr-sockets\") pod \"frr-k8s-vrp9g\" (UID: \"38150466-9f4a-4ae1-a5b5-bfca202b829f\") " pod="metallb-system/frr-k8s-vrp9g" Oct 01 15:15:40 crc kubenswrapper[4869]: I1001 15:15:40.889959 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/38150466-9f4a-4ae1-a5b5-bfca202b829f-frr-startup\") pod \"frr-k8s-vrp9g\" (UID: \"38150466-9f4a-4ae1-a5b5-bfca202b829f\") " pod="metallb-system/frr-k8s-vrp9g" Oct 01 15:15:40 crc kubenswrapper[4869]: I1001 15:15:40.889980 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/38150466-9f4a-4ae1-a5b5-bfca202b829f-frr-conf\") pod \"frr-k8s-vrp9g\" (UID: \"38150466-9f4a-4ae1-a5b5-bfca202b829f\") " pod="metallb-system/frr-k8s-vrp9g" Oct 01 15:15:40 crc kubenswrapper[4869]: I1001 15:15:40.890025 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6w4n4\" (UniqueName: \"kubernetes.io/projected/38150466-9f4a-4ae1-a5b5-bfca202b829f-kube-api-access-6w4n4\") pod \"frr-k8s-vrp9g\" (UID: \"38150466-9f4a-4ae1-a5b5-bfca202b829f\") " pod="metallb-system/frr-k8s-vrp9g" Oct 01 15:15:40 crc kubenswrapper[4869]: I1001 15:15:40.890050 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/38150466-9f4a-4ae1-a5b5-bfca202b829f-metrics-certs\") pod \"frr-k8s-vrp9g\" (UID: \"38150466-9f4a-4ae1-a5b5-bfca202b829f\") " 
pod="metallb-system/frr-k8s-vrp9g" Oct 01 15:15:40 crc kubenswrapper[4869]: I1001 15:15:40.890075 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/38150466-9f4a-4ae1-a5b5-bfca202b829f-reloader\") pod \"frr-k8s-vrp9g\" (UID: \"38150466-9f4a-4ae1-a5b5-bfca202b829f\") " pod="metallb-system/frr-k8s-vrp9g" Oct 01 15:15:40 crc kubenswrapper[4869]: I1001 15:15:40.935767 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-kntp6"] Oct 01 15:15:40 crc kubenswrapper[4869]: I1001 15:15:40.936589 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-kntp6" Oct 01 15:15:40 crc kubenswrapper[4869]: I1001 15:15:40.947752 4869 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist" Oct 01 15:15:40 crc kubenswrapper[4869]: I1001 15:15:40.947799 4869 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret" Oct 01 15:15:40 crc kubenswrapper[4869]: I1001 15:15:40.948114 4869 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-7pbg9" Oct 01 15:15:40 crc kubenswrapper[4869]: I1001 15:15:40.948276 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2" Oct 01 15:15:40 crc kubenswrapper[4869]: I1001 15:15:40.951999 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-5d688f5ffc-jtdv4"] Oct 01 15:15:40 crc kubenswrapper[4869]: I1001 15:15:40.953010 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-5d688f5ffc-jtdv4" Oct 01 15:15:40 crc kubenswrapper[4869]: I1001 15:15:40.955793 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-5d688f5ffc-jtdv4"] Oct 01 15:15:40 crc kubenswrapper[4869]: I1001 15:15:40.956526 4869 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret" Oct 01 15:15:40 crc kubenswrapper[4869]: I1001 15:15:40.990921 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/74ce9afe-754c-4610-90f1-a9c42b2cd395-memberlist\") pod \"speaker-kntp6\" (UID: \"74ce9afe-754c-4610-90f1-a9c42b2cd395\") " pod="metallb-system/speaker-kntp6" Oct 01 15:15:40 crc kubenswrapper[4869]: I1001 15:15:40.991251 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/38150466-9f4a-4ae1-a5b5-bfca202b829f-metrics\") pod \"frr-k8s-vrp9g\" (UID: \"38150466-9f4a-4ae1-a5b5-bfca202b829f\") " pod="metallb-system/frr-k8s-vrp9g" Oct 01 15:15:40 crc kubenswrapper[4869]: I1001 15:15:40.991285 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/74ce9afe-754c-4610-90f1-a9c42b2cd395-metrics-certs\") pod \"speaker-kntp6\" (UID: \"74ce9afe-754c-4610-90f1-a9c42b2cd395\") " pod="metallb-system/speaker-kntp6" Oct 01 15:15:40 crc kubenswrapper[4869]: I1001 15:15:40.991301 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/38150466-9f4a-4ae1-a5b5-bfca202b829f-frr-sockets\") pod \"frr-k8s-vrp9g\" (UID: \"38150466-9f4a-4ae1-a5b5-bfca202b829f\") " 
pod="metallb-system/frr-k8s-vrp9g" Oct 01 15:15:40 crc kubenswrapper[4869]: I1001 15:15:40.991321 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lxmdv\" (UniqueName: \"kubernetes.io/projected/74ce9afe-754c-4610-90f1-a9c42b2cd395-kube-api-access-lxmdv\") pod \"speaker-kntp6\" (UID: \"74ce9afe-754c-4610-90f1-a9c42b2cd395\") " pod="metallb-system/speaker-kntp6" Oct 01 15:15:40 crc kubenswrapper[4869]: I1001 15:15:40.991342 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/38150466-9f4a-4ae1-a5b5-bfca202b829f-frr-startup\") pod \"frr-k8s-vrp9g\" (UID: \"38150466-9f4a-4ae1-a5b5-bfca202b829f\") " pod="metallb-system/frr-k8s-vrp9g" Oct 01 15:15:40 crc kubenswrapper[4869]: I1001 15:15:40.991365 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/38150466-9f4a-4ae1-a5b5-bfca202b829f-frr-conf\") pod \"frr-k8s-vrp9g\" (UID: \"38150466-9f4a-4ae1-a5b5-bfca202b829f\") " pod="metallb-system/frr-k8s-vrp9g" Oct 01 15:15:40 crc kubenswrapper[4869]: I1001 15:15:40.991385 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/74ce9afe-754c-4610-90f1-a9c42b2cd395-metallb-excludel2\") pod \"speaker-kntp6\" (UID: \"74ce9afe-754c-4610-90f1-a9c42b2cd395\") " pod="metallb-system/speaker-kntp6" Oct 01 15:15:40 crc kubenswrapper[4869]: I1001 15:15:40.991410 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6w4n4\" (UniqueName: \"kubernetes.io/projected/38150466-9f4a-4ae1-a5b5-bfca202b829f-kube-api-access-6w4n4\") pod \"frr-k8s-vrp9g\" (UID: \"38150466-9f4a-4ae1-a5b5-bfca202b829f\") " pod="metallb-system/frr-k8s-vrp9g" Oct 01 15:15:40 crc kubenswrapper[4869]: I1001 15:15:40.991433 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/38150466-9f4a-4ae1-a5b5-bfca202b829f-metrics-certs\") pod \"frr-k8s-vrp9g\" (UID: \"38150466-9f4a-4ae1-a5b5-bfca202b829f\") " pod="metallb-system/frr-k8s-vrp9g" Oct 01 15:15:40 crc kubenswrapper[4869]: I1001 15:15:40.991450 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-spzw9\" (UniqueName: \"kubernetes.io/projected/5ff28534-d014-499c-82cb-ffe31d55e7a1-kube-api-access-spzw9\") pod \"frr-k8s-webhook-server-5478bdb765-t9fnk\" (UID: \"5ff28534-d014-499c-82cb-ffe31d55e7a1\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-t9fnk" Oct 01 15:15:40 crc kubenswrapper[4869]: I1001 15:15:40.991475 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/38150466-9f4a-4ae1-a5b5-bfca202b829f-reloader\") pod \"frr-k8s-vrp9g\" (UID: \"38150466-9f4a-4ae1-a5b5-bfca202b829f\") " pod="metallb-system/frr-k8s-vrp9g" Oct 01 15:15:40 crc kubenswrapper[4869]: I1001 15:15:40.991501 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/5ff28534-d014-499c-82cb-ffe31d55e7a1-cert\") pod \"frr-k8s-webhook-server-5478bdb765-t9fnk\" (UID: \"5ff28534-d014-499c-82cb-ffe31d55e7a1\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-t9fnk" Oct 01 15:15:40 crc kubenswrapper[4869]: I1001 15:15:40.991868 4869 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/38150466-9f4a-4ae1-a5b5-bfca202b829f-metrics\") pod \"frr-k8s-vrp9g\" (UID: \"38150466-9f4a-4ae1-a5b5-bfca202b829f\") " pod="metallb-system/frr-k8s-vrp9g" Oct 01 15:15:40 crc kubenswrapper[4869]: I1001 15:15:40.992042 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/38150466-9f4a-4ae1-a5b5-bfca202b829f-frr-sockets\") pod \"frr-k8s-vrp9g\" (UID: \"38150466-9f4a-4ae1-a5b5-bfca202b829f\") " pod="metallb-system/frr-k8s-vrp9g" Oct 01 15:15:40 crc kubenswrapper[4869]: I1001 15:15:40.992783 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/38150466-9f4a-4ae1-a5b5-bfca202b829f-frr-startup\") pod \"frr-k8s-vrp9g\" (UID: \"38150466-9f4a-4ae1-a5b5-bfca202b829f\") " pod="metallb-system/frr-k8s-vrp9g" Oct 01 15:15:40 crc kubenswrapper[4869]: I1001 15:15:40.992785 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/38150466-9f4a-4ae1-a5b5-bfca202b829f-frr-conf\") pod \"frr-k8s-vrp9g\" (UID: \"38150466-9f4a-4ae1-a5b5-bfca202b829f\") " pod="metallb-system/frr-k8s-vrp9g" Oct 01 15:15:40 crc kubenswrapper[4869]: E1001 15:15:40.992874 4869 secret.go:188] Couldn't get secret metallb-system/frr-k8s-certs-secret: secret "frr-k8s-certs-secret" not found Oct 01 15:15:40 crc kubenswrapper[4869]: E1001 15:15:40.992919 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/38150466-9f4a-4ae1-a5b5-bfca202b829f-metrics-certs podName:38150466-9f4a-4ae1-a5b5-bfca202b829f nodeName:}" failed. No retries permitted until 2025-10-01 15:15:41.492902636 +0000 UTC m=+650.639745752 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/38150466-9f4a-4ae1-a5b5-bfca202b829f-metrics-certs") pod "frr-k8s-vrp9g" (UID: "38150466-9f4a-4ae1-a5b5-bfca202b829f") : secret "frr-k8s-certs-secret" not found Oct 01 15:15:40 crc kubenswrapper[4869]: I1001 15:15:40.992964 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/38150466-9f4a-4ae1-a5b5-bfca202b829f-reloader\") pod \"frr-k8s-vrp9g\" (UID: \"38150466-9f4a-4ae1-a5b5-bfca202b829f\") " pod="metallb-system/frr-k8s-vrp9g" Oct 01 15:15:41 crc kubenswrapper[4869]: I1001 15:15:41.019845 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6w4n4\" (UniqueName: \"kubernetes.io/projected/38150466-9f4a-4ae1-a5b5-bfca202b829f-kube-api-access-6w4n4\") pod \"frr-k8s-vrp9g\" (UID: \"38150466-9f4a-4ae1-a5b5-bfca202b829f\") " pod="metallb-system/frr-k8s-vrp9g" Oct 01 15:15:41 crc kubenswrapper[4869]: I1001 15:15:41.093216 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/74ce9afe-754c-4610-90f1-a9c42b2cd395-memberlist\") pod \"speaker-kntp6\" (UID: \"74ce9afe-754c-4610-90f1-a9c42b2cd395\") " pod="metallb-system/speaker-kntp6" Oct 01 15:15:41 crc kubenswrapper[4869]: I1001 15:15:41.093307 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/74ce9afe-754c-4610-90f1-a9c42b2cd395-metrics-certs\") pod \"speaker-kntp6\" (UID: \"74ce9afe-754c-4610-90f1-a9c42b2cd395\") " pod="metallb-system/speaker-kntp6" Oct 01 15:15:41 crc kubenswrapper[4869]: I1001 15:15:41.093334 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lxmdv\" (UniqueName: \"kubernetes.io/projected/74ce9afe-754c-4610-90f1-a9c42b2cd395-kube-api-access-lxmdv\") pod \"speaker-kntp6\" (UID: \"74ce9afe-754c-4610-90f1-a9c42b2cd395\") " pod="metallb-system/speaker-kntp6" Oct 01 15:15:41 crc kubenswrapper[4869]: I1001 15:15:41.093372 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b15f2099-55a9-4e22-a7da-a1b91fcd63db-cert\") pod \"controller-5d688f5ffc-jtdv4\" (UID: \"b15f2099-55a9-4e22-a7da-a1b91fcd63db\") " pod="metallb-system/controller-5d688f5ffc-jtdv4" Oct 01 15:15:41 crc kubenswrapper[4869]: I1001 15:15:41.093390 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/74ce9afe-754c-4610-90f1-a9c42b2cd395-metallb-excludel2\") pod \"speaker-kntp6\" (UID: \"74ce9afe-754c-4610-90f1-a9c42b2cd395\") " pod="metallb-system/speaker-kntp6" Oct 01 15:15:41 crc kubenswrapper[4869]: I1001 15:15:41.093415 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b15f2099-55a9-4e22-a7da-a1b91fcd63db-metrics-certs\") pod \"controller-5d688f5ffc-jtdv4\" (UID: \"b15f2099-55a9-4e22-a7da-a1b91fcd63db\") " pod="metallb-system/controller-5d688f5ffc-jtdv4" Oct 01 15:15:41 crc kubenswrapper[4869]: E1001 15:15:41.093414 4869 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Oct 01 15:15:41 crc kubenswrapper[4869]: I1001 15:15:41.093447 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-spzw9\" (UniqueName: \"kubernetes.io/projected/5ff28534-d014-499c-82cb-ffe31d55e7a1-kube-api-access-spzw9\") pod \"frr-k8s-webhook-server-5478bdb765-t9fnk\" (UID: \"5ff28534-d014-499c-82cb-ffe31d55e7a1\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-t9fnk" Oct 01 15:15:41 crc kubenswrapper[4869]: E1001 15:15:41.093676 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/74ce9afe-754c-4610-90f1-a9c42b2cd395-memberlist podName:74ce9afe-754c-4610-90f1-a9c42b2cd395 nodeName:}" failed. No retries permitted until 2025-10-01 15:15:41.59347178 +0000 UTC m=+650.740314896 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/74ce9afe-754c-4610-90f1-a9c42b2cd395-memberlist") pod "speaker-kntp6" (UID: "74ce9afe-754c-4610-90f1-a9c42b2cd395") : secret "metallb-memberlist" not found Oct 01 15:15:41 crc kubenswrapper[4869]: I1001 15:15:41.093707 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m2t6d\" (UniqueName: \"kubernetes.io/projected/b15f2099-55a9-4e22-a7da-a1b91fcd63db-kube-api-access-m2t6d\") pod \"controller-5d688f5ffc-jtdv4\" (UID: \"b15f2099-55a9-4e22-a7da-a1b91fcd63db\") " pod="metallb-system/controller-5d688f5ffc-jtdv4" Oct 01 15:15:41 crc kubenswrapper[4869]: I1001 15:15:41.093801 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/5ff28534-d014-499c-82cb-ffe31d55e7a1-cert\") pod \"frr-k8s-webhook-server-5478bdb765-t9fnk\" (UID: \"5ff28534-d014-499c-82cb-ffe31d55e7a1\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-t9fnk" Oct 01 15:15:41 crc kubenswrapper[4869]: I1001 15:15:41.094410 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/74ce9afe-754c-4610-90f1-a9c42b2cd395-metallb-excludel2\") pod \"speaker-kntp6\" (UID: \"74ce9afe-754c-4610-90f1-a9c42b2cd395\") " pod="metallb-system/speaker-kntp6" Oct 01 15:15:41 crc kubenswrapper[4869]: I1001 15:15:41.097638 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/74ce9afe-754c-4610-90f1-a9c42b2cd395-metrics-certs\") pod \"speaker-kntp6\" (UID: \"74ce9afe-754c-4610-90f1-a9c42b2cd395\") " pod="metallb-system/speaker-kntp6" Oct 01 15:15:41 crc kubenswrapper[4869]: I1001 15:15:41.099963 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/5ff28534-d014-499c-82cb-ffe31d55e7a1-cert\") pod \"frr-k8s-webhook-server-5478bdb765-t9fnk\" (UID: \"5ff28534-d014-499c-82cb-ffe31d55e7a1\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-t9fnk" Oct 01 15:15:41 crc kubenswrapper[4869]: I1001 15:15:41.111000 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-spzw9\" (UniqueName: \"kubernetes.io/projected/5ff28534-d014-499c-82cb-ffe31d55e7a1-kube-api-access-spzw9\") pod \"frr-k8s-webhook-server-5478bdb765-t9fnk\" (UID: \"5ff28534-d014-499c-82cb-ffe31d55e7a1\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-t9fnk" Oct 01 15:15:41 crc kubenswrapper[4869]: I1001 15:15:41.132563 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lxmdv\" (UniqueName: \"kubernetes.io/projected/74ce9afe-754c-4610-90f1-a9c42b2cd395-kube-api-access-lxmdv\") pod \"speaker-kntp6\" (UID: 
\"74ce9afe-754c-4610-90f1-a9c42b2cd395\") " pod="metallb-system/speaker-kntp6" Oct 01 15:15:41 crc kubenswrapper[4869]: I1001 15:15:41.176390 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-t9fnk" Oct 01 15:15:41 crc kubenswrapper[4869]: I1001 15:15:41.195235 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b15f2099-55a9-4e22-a7da-a1b91fcd63db-metrics-certs\") pod \"controller-5d688f5ffc-jtdv4\" (UID: \"b15f2099-55a9-4e22-a7da-a1b91fcd63db\") " pod="metallb-system/controller-5d688f5ffc-jtdv4" Oct 01 15:15:41 crc kubenswrapper[4869]: I1001 15:15:41.195357 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m2t6d\" (UniqueName: \"kubernetes.io/projected/b15f2099-55a9-4e22-a7da-a1b91fcd63db-kube-api-access-m2t6d\") pod \"controller-5d688f5ffc-jtdv4\" (UID: \"b15f2099-55a9-4e22-a7da-a1b91fcd63db\") " pod="metallb-system/controller-5d688f5ffc-jtdv4" Oct 01 15:15:41 crc kubenswrapper[4869]: I1001 15:15:41.195462 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b15f2099-55a9-4e22-a7da-a1b91fcd63db-cert\") pod \"controller-5d688f5ffc-jtdv4\" (UID: \"b15f2099-55a9-4e22-a7da-a1b91fcd63db\") " pod="metallb-system/controller-5d688f5ffc-jtdv4" Oct 01 15:15:41 crc kubenswrapper[4869]: I1001 15:15:41.200314 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b15f2099-55a9-4e22-a7da-a1b91fcd63db-cert\") pod \"controller-5d688f5ffc-jtdv4\" (UID: \"b15f2099-55a9-4e22-a7da-a1b91fcd63db\") " pod="metallb-system/controller-5d688f5ffc-jtdv4" Oct 01 15:15:41 crc kubenswrapper[4869]: I1001 15:15:41.200612 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b15f2099-55a9-4e22-a7da-a1b91fcd63db-metrics-certs\") pod \"controller-5d688f5ffc-jtdv4\" (UID: \"b15f2099-55a9-4e22-a7da-a1b91fcd63db\") " pod="metallb-system/controller-5d688f5ffc-jtdv4" Oct 01 15:15:41 crc kubenswrapper[4869]: I1001 15:15:41.213320 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m2t6d\" (UniqueName: \"kubernetes.io/projected/b15f2099-55a9-4e22-a7da-a1b91fcd63db-kube-api-access-m2t6d\") pod \"controller-5d688f5ffc-jtdv4\" (UID: \"b15f2099-55a9-4e22-a7da-a1b91fcd63db\") " pod="metallb-system/controller-5d688f5ffc-jtdv4" Oct 01 15:15:41 crc kubenswrapper[4869]: I1001 15:15:41.271365 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-5d688f5ffc-jtdv4" Oct 01 15:15:41 crc kubenswrapper[4869]: I1001 15:15:41.499877 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/38150466-9f4a-4ae1-a5b5-bfca202b829f-metrics-certs\") pod \"frr-k8s-vrp9g\" (UID: \"38150466-9f4a-4ae1-a5b5-bfca202b829f\") " pod="metallb-system/frr-k8s-vrp9g" Oct 01 15:15:41 crc kubenswrapper[4869]: I1001 15:15:41.513097 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/38150466-9f4a-4ae1-a5b5-bfca202b829f-metrics-certs\") pod \"frr-k8s-vrp9g\" (UID: \"38150466-9f4a-4ae1-a5b5-bfca202b829f\") " pod="metallb-system/frr-k8s-vrp9g" Oct 01 15:15:41 crc kubenswrapper[4869]: I1001 15:15:41.601886 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/74ce9afe-754c-4610-90f1-a9c42b2cd395-memberlist\") pod \"speaker-kntp6\" (UID: \"74ce9afe-754c-4610-90f1-a9c42b2cd395\") " pod="metallb-system/speaker-kntp6" Oct 01 15:15:41 crc kubenswrapper[4869]: E1001 15:15:41.602039 4869 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Oct 01 15:15:41 crc kubenswrapper[4869]: E1001 15:15:41.602089 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/74ce9afe-754c-4610-90f1-a9c42b2cd395-memberlist podName:74ce9afe-754c-4610-90f1-a9c42b2cd395 nodeName:}" failed. No retries permitted until 2025-10-01 15:15:42.602073417 +0000 UTC m=+651.748916533 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/74ce9afe-754c-4610-90f1-a9c42b2cd395-memberlist") pod "speaker-kntp6" (UID: "74ce9afe-754c-4610-90f1-a9c42b2cd395") : secret "metallb-memberlist" not found Oct 01 15:15:41 crc kubenswrapper[4869]: I1001 15:15:41.604161 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-5d688f5ffc-jtdv4"] Oct 01 15:15:41 crc kubenswrapper[4869]: W1001 15:15:41.607069 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb15f2099_55a9_4e22_a7da_a1b91fcd63db.slice/crio-57b0659526a2f247f38e6ff45bb6cdd83f6615c423ab6e5beb2820433d6fc6eb WatchSource:0}: Error finding container 57b0659526a2f247f38e6ff45bb6cdd83f6615c423ab6e5beb2820433d6fc6eb: Status 404 returned error can't find the container with id 57b0659526a2f247f38e6ff45bb6cdd83f6615c423ab6e5beb2820433d6fc6eb Oct 01 15:15:41 crc kubenswrapper[4869]: I1001 15:15:41.657138 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-5478bdb765-t9fnk"] Oct 01 15:15:41 crc kubenswrapper[4869]: W1001 15:15:41.673281 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5ff28534_d014_499c_82cb_ffe31d55e7a1.slice/crio-33db778453772d84b486f5d86fe6d7df6f48ca41bdc2b9d1f8da310740965867 WatchSource:0}: Error finding container 33db778453772d84b486f5d86fe6d7df6f48ca41bdc2b9d1f8da310740965867: Status 404 returned error can't find the container with id 33db778453772d84b486f5d86fe6d7df6f48ca41bdc2b9d1f8da310740965867 Oct 01 15:15:41 crc kubenswrapper[4869]: I1001 15:15:41.763948 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-vrp9g" Oct 01 15:15:41 crc kubenswrapper[4869]: I1001 15:15:41.850758 4869 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Oct 01 15:15:42 crc kubenswrapper[4869]: I1001 15:15:42.494585 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-vrp9g" event={"ID":"38150466-9f4a-4ae1-a5b5-bfca202b829f","Type":"ContainerStarted","Data":"e859d261b906f2e4cb0aa85e666e6facffa874d1c8bb264bcbe50e6895c89dc2"} Oct 01 15:15:42 crc kubenswrapper[4869]: I1001 15:15:42.495407 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-t9fnk" event={"ID":"5ff28534-d014-499c-82cb-ffe31d55e7a1","Type":"ContainerStarted","Data":"33db778453772d84b486f5d86fe6d7df6f48ca41bdc2b9d1f8da310740965867"} Oct 01 15:15:42 crc kubenswrapper[4869]: I1001 15:15:42.497581 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-5d688f5ffc-jtdv4" event={"ID":"b15f2099-55a9-4e22-a7da-a1b91fcd63db","Type":"ContainerStarted","Data":"916d4e78082a466f9783987d107bb8d9a642ba201d705b86f0fa616f3fc9d79b"} Oct 01 15:15:42 crc kubenswrapper[4869]: I1001 15:15:42.497607 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-5d688f5ffc-jtdv4" event={"ID":"b15f2099-55a9-4e22-a7da-a1b91fcd63db","Type":"ContainerStarted","Data":"f0e28e051f6a02338e2ccd35331422158adabea194661b5e4e3040620360e253"} Oct 01 15:15:42 crc kubenswrapper[4869]: I1001 15:15:42.497619 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-5d688f5ffc-jtdv4" event={"ID":"b15f2099-55a9-4e22-a7da-a1b91fcd63db","Type":"ContainerStarted","Data":"57b0659526a2f247f38e6ff45bb6cdd83f6615c423ab6e5beb2820433d6fc6eb"} Oct 01 15:15:42 crc kubenswrapper[4869]: I1001 15:15:42.497749 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-5d688f5ffc-jtdv4" Oct 01 15:15:42 crc kubenswrapper[4869]: I1001 15:15:42.525163 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-5d688f5ffc-jtdv4" podStartSLOduration=2.525146645 podStartE2EDuration="2.525146645s" podCreationTimestamp="2025-10-01 15:15:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:15:42.520751766 +0000 UTC m=+651.667594882" watchObservedRunningTime="2025-10-01 15:15:42.525146645 +0000 UTC m=+651.671989761" Oct 01 15:15:42 crc kubenswrapper[4869]: I1001 15:15:42.616764 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/74ce9afe-754c-4610-90f1-a9c42b2cd395-memberlist\") pod \"speaker-kntp6\" (UID: \"74ce9afe-754c-4610-90f1-a9c42b2cd395\") " pod="metallb-system/speaker-kntp6" Oct 01 15:15:42 crc kubenswrapper[4869]: I1001 15:15:42.626078 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/74ce9afe-754c-4610-90f1-a9c42b2cd395-memberlist\") pod \"speaker-kntp6\" (UID: \"74ce9afe-754c-4610-90f1-a9c42b2cd395\") " pod="metallb-system/speaker-kntp6" Oct 01 15:15:42 crc kubenswrapper[4869]: I1001 15:15:42.764480 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/speaker-kntp6" Oct 01 15:15:42 crc kubenswrapper[4869]: W1001 15:15:42.796704 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod74ce9afe_754c_4610_90f1_a9c42b2cd395.slice/crio-65b13b20d25a8874e9b459dcb15d8ab3d51c4719224b1721f2ae5e090ed01c64 WatchSource:0}: Error finding container 65b13b20d25a8874e9b459dcb15d8ab3d51c4719224b1721f2ae5e090ed01c64: Status 404 returned error can't find the container with id 65b13b20d25a8874e9b459dcb15d8ab3d51c4719224b1721f2ae5e090ed01c64 Oct 01 15:15:43 crc kubenswrapper[4869]: I1001 15:15:43.520010 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-kntp6" event={"ID":"74ce9afe-754c-4610-90f1-a9c42b2cd395","Type":"ContainerStarted","Data":"97a0af7433489a0858c912899c72c07ca77fd506d778954fd6dd112e4f076228"} Oct 01 15:15:43 crc kubenswrapper[4869]: I1001 15:15:43.520408 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-kntp6" event={"ID":"74ce9afe-754c-4610-90f1-a9c42b2cd395","Type":"ContainerStarted","Data":"48e854ae42ed5b5b19b9f5df29806b8cbdf94b2e1afe36b217cdc8ffa92eaa7e"} Oct 01 15:15:43 crc kubenswrapper[4869]: I1001 15:15:43.520430 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-kntp6" event={"ID":"74ce9afe-754c-4610-90f1-a9c42b2cd395","Type":"ContainerStarted","Data":"65b13b20d25a8874e9b459dcb15d8ab3d51c4719224b1721f2ae5e090ed01c64"} Oct 01 15:15:43 crc kubenswrapper[4869]: I1001 15:15:43.520660 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-kntp6" Oct 01 15:15:43 crc kubenswrapper[4869]: I1001 15:15:43.553194 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-kntp6" podStartSLOduration=3.553174377 podStartE2EDuration="3.553174377s" podCreationTimestamp="2025-10-01 15:15:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:15:43.551875115 +0000 UTC m=+652.698718231" watchObservedRunningTime="2025-10-01 15:15:43.553174377 +0000 UTC m=+652.700017493" Oct 01 15:15:49 crc kubenswrapper[4869]: I1001 15:15:49.561307 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-t9fnk" event={"ID":"5ff28534-d014-499c-82cb-ffe31d55e7a1","Type":"ContainerStarted","Data":"1095f8dab0f9106a8f9d7073f841cbfcef247a736cc00e4657f56be21feddbd7"} Oct 01 15:15:49 crc kubenswrapper[4869]: I1001 15:15:49.561822 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-t9fnk" Oct 01 15:15:49 crc kubenswrapper[4869]: I1001 15:15:49.564406 4869 generic.go:334] "Generic (PLEG): container finished" podID="38150466-9f4a-4ae1-a5b5-bfca202b829f" containerID="2c66249e632201212d06fd305de1c369bfe7a8eb299a38579b01785ea1fb6157" exitCode=0 Oct 01 15:15:49 crc kubenswrapper[4869]: I1001 15:15:49.564550 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-vrp9g" event={"ID":"38150466-9f4a-4ae1-a5b5-bfca202b829f","Type":"ContainerDied","Data":"2c66249e632201212d06fd305de1c369bfe7a8eb299a38579b01785ea1fb6157"} Oct 01 15:15:49 crc kubenswrapper[4869]: I1001 15:15:49.581391 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-t9fnk" podStartSLOduration=2.419052064 
podStartE2EDuration="9.581373915s" podCreationTimestamp="2025-10-01 15:15:40 +0000 UTC" firstStartedPulling="2025-10-01 15:15:41.676322439 +0000 UTC m=+650.823165555" lastFinishedPulling="2025-10-01 15:15:48.83864429 +0000 UTC m=+657.985487406" observedRunningTime="2025-10-01 15:15:49.579094149 +0000 UTC m=+658.725937275" watchObservedRunningTime="2025-10-01 15:15:49.581373915 +0000 UTC m=+658.728217031" Oct 01 15:15:50 crc kubenswrapper[4869]: I1001 15:15:50.575103 4869 generic.go:334] "Generic (PLEG): container finished" podID="38150466-9f4a-4ae1-a5b5-bfca202b829f" containerID="61737b4bf50fd855eb59de0e5a68e27ef4c598b4ddbe64764455fb568926bc52" exitCode=0 Oct 01 15:15:50 crc kubenswrapper[4869]: I1001 15:15:50.575192 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-vrp9g" event={"ID":"38150466-9f4a-4ae1-a5b5-bfca202b829f","Type":"ContainerDied","Data":"61737b4bf50fd855eb59de0e5a68e27ef4c598b4ddbe64764455fb568926bc52"} Oct 01 15:15:51 crc kubenswrapper[4869]: I1001 15:15:51.278389 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-5d688f5ffc-jtdv4" Oct 01 15:15:51 crc kubenswrapper[4869]: I1001 15:15:51.584341 4869 generic.go:334] "Generic (PLEG): container finished" podID="38150466-9f4a-4ae1-a5b5-bfca202b829f" containerID="0dd43c15e6404f8ea5d0b070b46184b12204e5be20f1f08405f36af12286da07" exitCode=0 Oct 01 15:15:51 crc kubenswrapper[4869]: I1001 15:15:51.595886 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-vrp9g" event={"ID":"38150466-9f4a-4ae1-a5b5-bfca202b829f","Type":"ContainerDied","Data":"0dd43c15e6404f8ea5d0b070b46184b12204e5be20f1f08405f36af12286da07"} Oct 01 15:15:52 crc kubenswrapper[4869]: I1001 15:15:52.592502 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-vrp9g" event={"ID":"38150466-9f4a-4ae1-a5b5-bfca202b829f","Type":"ContainerStarted","Data":"69b77abd8f39d740a581372658860145f35f34a8eacd6dccecd5de6817c73d1b"} Oct 01 15:15:52 crc kubenswrapper[4869]: I1001 15:15:52.593705 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-vrp9g" Oct 01 15:15:52 crc kubenswrapper[4869]: I1001 15:15:52.593794 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-vrp9g" event={"ID":"38150466-9f4a-4ae1-a5b5-bfca202b829f","Type":"ContainerStarted","Data":"18fca2b1d4ce900b868746591ae6a63b53c11f8873e7f20c13e90a1ba65b66ff"} Oct 01 15:15:52 crc kubenswrapper[4869]: I1001 15:15:52.593871 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-vrp9g" event={"ID":"38150466-9f4a-4ae1-a5b5-bfca202b829f","Type":"ContainerStarted","Data":"d47376f0b0900e8d989fb702b9a922fb8d94fbadd197b208d10756adf7d9f781"} Oct 01 15:15:52 crc kubenswrapper[4869]: I1001 15:15:52.593927 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-vrp9g" event={"ID":"38150466-9f4a-4ae1-a5b5-bfca202b829f","Type":"ContainerStarted","Data":"b4637c37a19c42553fdba44eab27cdf16f2371202c7d1aa5006cea06065a1d37"} Oct 01 15:15:52 crc kubenswrapper[4869]: I1001 15:15:52.593980 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-vrp9g" event={"ID":"38150466-9f4a-4ae1-a5b5-bfca202b829f","Type":"ContainerStarted","Data":"7183574049d68257b43b0d7565c6d99799de35132e86d9a4f83fcd75701bb77c"} Oct 01 15:15:52 crc kubenswrapper[4869]: I1001 15:15:52.594041 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-vrp9g" 
event={"ID":"38150466-9f4a-4ae1-a5b5-bfca202b829f","Type":"ContainerStarted","Data":"7ea00b63fc78e883cd02ad429492496e6b43bbca90d812745be1b98ea1507b04"} Oct 01 15:15:52 crc kubenswrapper[4869]: I1001 15:15:52.620567 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-vrp9g" podStartSLOduration=5.63662728 podStartE2EDuration="12.620548466s" podCreationTimestamp="2025-10-01 15:15:40 +0000 UTC" firstStartedPulling="2025-10-01 15:15:41.877730925 +0000 UTC m=+651.024574031" lastFinishedPulling="2025-10-01 15:15:48.861652101 +0000 UTC m=+658.008495217" observedRunningTime="2025-10-01 15:15:52.616859575 +0000 UTC m=+661.763702691" watchObservedRunningTime="2025-10-01 15:15:52.620548466 +0000 UTC m=+661.767391592" Oct 01 15:15:56 crc kubenswrapper[4869]: I1001 15:15:56.765914 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-vrp9g" Oct 01 15:15:56 crc kubenswrapper[4869]: I1001 15:15:56.833356 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-vrp9g" Oct 01 15:16:01 crc kubenswrapper[4869]: I1001 15:16:01.182765 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-t9fnk" Oct 01 15:16:01 crc kubenswrapper[4869]: I1001 15:16:01.769107 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-vrp9g" Oct 01 15:16:02 crc kubenswrapper[4869]: I1001 15:16:02.771345 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-kntp6" Oct 01 15:16:09 crc kubenswrapper[4869]: I1001 15:16:09.534020 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-t6m4z"] Oct 01 15:16:09 crc kubenswrapper[4869]: I1001 15:16:09.535888 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-t6m4z" Oct 01 15:16:09 crc kubenswrapper[4869]: I1001 15:16:09.544311 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-t6m4z"] Oct 01 15:16:09 crc kubenswrapper[4869]: I1001 15:16:09.549529 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-2rq7v" Oct 01 15:16:09 crc kubenswrapper[4869]: I1001 15:16:09.550336 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Oct 01 15:16:09 crc kubenswrapper[4869]: I1001 15:16:09.550899 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Oct 01 15:16:09 crc kubenswrapper[4869]: I1001 15:16:09.656089 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xwshc\" (UniqueName: \"kubernetes.io/projected/ec4e2c58-823d-4a10-a96e-5e4a1a8955f2-kube-api-access-xwshc\") pod \"openstack-operator-index-t6m4z\" (UID: \"ec4e2c58-823d-4a10-a96e-5e4a1a8955f2\") " pod="openstack-operators/openstack-operator-index-t6m4z" Oct 01 15:16:09 crc kubenswrapper[4869]: I1001 15:16:09.757286 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xwshc\" (UniqueName: \"kubernetes.io/projected/ec4e2c58-823d-4a10-a96e-5e4a1a8955f2-kube-api-access-xwshc\") pod \"openstack-operator-index-t6m4z\" (UID: \"ec4e2c58-823d-4a10-a96e-5e4a1a8955f2\") " pod="openstack-operators/openstack-operator-index-t6m4z" Oct 01 15:16:09 crc kubenswrapper[4869]: I1001 15:16:09.779909 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xwshc\" (UniqueName: \"kubernetes.io/projected/ec4e2c58-823d-4a10-a96e-5e4a1a8955f2-kube-api-access-xwshc\") pod \"openstack-operator-index-t6m4z\" (UID: \"ec4e2c58-823d-4a10-a96e-5e4a1a8955f2\") " pod="openstack-operators/openstack-operator-index-t6m4z" Oct 01 15:16:09 crc kubenswrapper[4869]: I1001 15:16:09.873332 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-t6m4z" Oct 01 15:16:10 crc kubenswrapper[4869]: I1001 15:16:10.325773 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-t6m4z"] Oct 01 15:16:10 crc kubenswrapper[4869]: I1001 15:16:10.718957 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-t6m4z" event={"ID":"ec4e2c58-823d-4a10-a96e-5e4a1a8955f2","Type":"ContainerStarted","Data":"7154dcf9a085580de8441523cc3722bed6ba1729ed800a0e8799b76a36895985"} Oct 01 15:16:11 crc kubenswrapper[4869]: I1001 15:16:11.732876 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-t6m4z" event={"ID":"ec4e2c58-823d-4a10-a96e-5e4a1a8955f2","Type":"ContainerStarted","Data":"8410f482476c8c76279b1df559de19144b9e8f46cf625995622b0e64e16ed87b"} Oct 01 15:16:11 crc kubenswrapper[4869]: I1001 15:16:11.748986 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-t6m4z" podStartSLOduration=1.857232314 podStartE2EDuration="2.748961245s" podCreationTimestamp="2025-10-01 15:16:09 +0000 UTC" firstStartedPulling="2025-10-01 15:16:10.339923571 +0000 UTC m=+679.486766687" lastFinishedPulling="2025-10-01 15:16:11.231652492 +0000 UTC m=+680.378495618" observedRunningTime="2025-10-01 15:16:11.747853947 +0000 UTC m=+680.894697103" watchObservedRunningTime="2025-10-01 15:16:11.748961245 +0000 UTC m=+680.895804401" Oct 01 15:16:13 crc kubenswrapper[4869]: I1001 15:16:13.354873 4869 patch_prober.go:28] interesting pod/machine-config-daemon-c86m8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 15:16:13 crc kubenswrapper[4869]: I1001 15:16:13.355526 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 15:16:18 crc kubenswrapper[4869]: I1001 15:16:18.135972 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-5lh7f"] Oct 01 15:16:18 crc kubenswrapper[4869]: I1001 15:16:18.137349 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-5lh7f" Oct 01 15:16:18 crc kubenswrapper[4869]: I1001 15:16:18.167168 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-5lh7f"] Oct 01 15:16:18 crc kubenswrapper[4869]: I1001 15:16:18.271409 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4e36ed2d-92da-4de9-94bb-43d810ded2c9-catalog-content\") pod \"certified-operators-5lh7f\" (UID: \"4e36ed2d-92da-4de9-94bb-43d810ded2c9\") " pod="openshift-marketplace/certified-operators-5lh7f" Oct 01 15:16:18 crc kubenswrapper[4869]: I1001 15:16:18.271476 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ttbrv\" (UniqueName: \"kubernetes.io/projected/4e36ed2d-92da-4de9-94bb-43d810ded2c9-kube-api-access-ttbrv\") pod \"certified-operators-5lh7f\" (UID: \"4e36ed2d-92da-4de9-94bb-43d810ded2c9\") " pod="openshift-marketplace/certified-operators-5lh7f" Oct 01 15:16:18 crc kubenswrapper[4869]: I1001 15:16:18.271707 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4e36ed2d-92da-4de9-94bb-43d810ded2c9-utilities\") pod \"certified-operators-5lh7f\" (UID: \"4e36ed2d-92da-4de9-94bb-43d810ded2c9\") " pod="openshift-marketplace/certified-operators-5lh7f" Oct 01 15:16:18 crc kubenswrapper[4869]: I1001 15:16:18.373056 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4e36ed2d-92da-4de9-94bb-43d810ded2c9-catalog-content\") pod \"certified-operators-5lh7f\" (UID: \"4e36ed2d-92da-4de9-94bb-43d810ded2c9\") " pod="openshift-marketplace/certified-operators-5lh7f" Oct 01 15:16:18 crc kubenswrapper[4869]: I1001 15:16:18.373118 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ttbrv\" (UniqueName: \"kubernetes.io/projected/4e36ed2d-92da-4de9-94bb-43d810ded2c9-kube-api-access-ttbrv\") pod \"certified-operators-5lh7f\" (UID: \"4e36ed2d-92da-4de9-94bb-43d810ded2c9\") " pod="openshift-marketplace/certified-operators-5lh7f" Oct 01 15:16:18 crc kubenswrapper[4869]: I1001 15:16:18.373180 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4e36ed2d-92da-4de9-94bb-43d810ded2c9-utilities\") pod \"certified-operators-5lh7f\" (UID: \"4e36ed2d-92da-4de9-94bb-43d810ded2c9\") " pod="openshift-marketplace/certified-operators-5lh7f" Oct 01 15:16:18 crc kubenswrapper[4869]: I1001 15:16:18.373756 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4e36ed2d-92da-4de9-94bb-43d810ded2c9-catalog-content\") pod \"certified-operators-5lh7f\" (UID: \"4e36ed2d-92da-4de9-94bb-43d810ded2c9\") " pod="openshift-marketplace/certified-operators-5lh7f" Oct 01 15:16:18 crc kubenswrapper[4869]: I1001 15:16:18.373860 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4e36ed2d-92da-4de9-94bb-43d810ded2c9-utilities\") pod \"certified-operators-5lh7f\" (UID: \"4e36ed2d-92da-4de9-94bb-43d810ded2c9\") " pod="openshift-marketplace/certified-operators-5lh7f" Oct 01 15:16:18 crc kubenswrapper[4869]: I1001 15:16:18.394366 4869 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-ttbrv\" (UniqueName: \"kubernetes.io/projected/4e36ed2d-92da-4de9-94bb-43d810ded2c9-kube-api-access-ttbrv\") pod \"certified-operators-5lh7f\" (UID: \"4e36ed2d-92da-4de9-94bb-43d810ded2c9\") " pod="openshift-marketplace/certified-operators-5lh7f" Oct 01 15:16:18 crc kubenswrapper[4869]: I1001 15:16:18.459658 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5lh7f" Oct 01 15:16:18 crc kubenswrapper[4869]: I1001 15:16:18.899711 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-5lh7f"] Oct 01 15:16:19 crc kubenswrapper[4869]: I1001 15:16:19.783770 4869 generic.go:334] "Generic (PLEG): container finished" podID="4e36ed2d-92da-4de9-94bb-43d810ded2c9" containerID="99b763c346005a96efa040397d177d2c3f3e897050d9e97c0714f9e52e6e87ac" exitCode=0 Oct 01 15:16:19 crc kubenswrapper[4869]: I1001 15:16:19.783867 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5lh7f" event={"ID":"4e36ed2d-92da-4de9-94bb-43d810ded2c9","Type":"ContainerDied","Data":"99b763c346005a96efa040397d177d2c3f3e897050d9e97c0714f9e52e6e87ac"} Oct 01 15:16:19 crc kubenswrapper[4869]: I1001 15:16:19.784238 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5lh7f" event={"ID":"4e36ed2d-92da-4de9-94bb-43d810ded2c9","Type":"ContainerStarted","Data":"f0089822774a5b8fc3cb2c9a125949b71be367605a063d7bcbfa91ba1de52105"} Oct 01 15:16:19 crc kubenswrapper[4869]: I1001 15:16:19.873676 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/openstack-operator-index-t6m4z" Oct 01 15:16:19 crc kubenswrapper[4869]: I1001 15:16:19.873748 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-index-t6m4z" Oct 01 15:16:19 crc kubenswrapper[4869]: I1001 15:16:19.936371 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-t6m4z" Oct 01 15:16:20 crc kubenswrapper[4869]: I1001 15:16:20.795321 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5lh7f" event={"ID":"4e36ed2d-92da-4de9-94bb-43d810ded2c9","Type":"ContainerStarted","Data":"159e2a75b3a2ea71ccd5830d9a099fa720e9ee902027d656206c080cec30adf3"} Oct 01 15:16:20 crc kubenswrapper[4869]: I1001 15:16:20.828909 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-t6m4z" Oct 01 15:16:21 crc kubenswrapper[4869]: I1001 15:16:21.802681 4869 generic.go:334] "Generic (PLEG): container finished" podID="4e36ed2d-92da-4de9-94bb-43d810ded2c9" containerID="159e2a75b3a2ea71ccd5830d9a099fa720e9ee902027d656206c080cec30adf3" exitCode=0 Oct 01 15:16:21 crc kubenswrapper[4869]: I1001 15:16:21.802771 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5lh7f" event={"ID":"4e36ed2d-92da-4de9-94bb-43d810ded2c9","Type":"ContainerDied","Data":"159e2a75b3a2ea71ccd5830d9a099fa720e9ee902027d656206c080cec30adf3"} Oct 01 15:16:22 crc kubenswrapper[4869]: I1001 15:16:22.813588 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5lh7f" 
event={"ID":"4e36ed2d-92da-4de9-94bb-43d810ded2c9","Type":"ContainerStarted","Data":"c474bbfc684b2bb23c62fd3df9a960b17e02fe588be03ef6a555da9a6c439c41"} Oct 01 15:16:22 crc kubenswrapper[4869]: I1001 15:16:22.835849 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-5lh7f" podStartSLOduration=2.190832758 podStartE2EDuration="4.83580726s" podCreationTimestamp="2025-10-01 15:16:18 +0000 UTC" firstStartedPulling="2025-10-01 15:16:19.78660034 +0000 UTC m=+688.933443496" lastFinishedPulling="2025-10-01 15:16:22.431574842 +0000 UTC m=+691.578417998" observedRunningTime="2025-10-01 15:16:22.833725808 +0000 UTC m=+691.980568924" watchObservedRunningTime="2025-10-01 15:16:22.83580726 +0000 UTC m=+691.982650376" Oct 01 15:16:27 crc kubenswrapper[4869]: I1001 15:16:27.355008 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/0e56acba034a4fe5e21c87878f7cabfd0ead2befdef111b141ca11c71a867s4"] Oct 01 15:16:27 crc kubenswrapper[4869]: I1001 15:16:27.356428 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/0e56acba034a4fe5e21c87878f7cabfd0ead2befdef111b141ca11c71a867s4" Oct 01 15:16:27 crc kubenswrapper[4869]: I1001 15:16:27.360346 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-bdfc4" Oct 01 15:16:27 crc kubenswrapper[4869]: I1001 15:16:27.365744 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/0e56acba034a4fe5e21c87878f7cabfd0ead2befdef111b141ca11c71a867s4"] Oct 01 15:16:27 crc kubenswrapper[4869]: I1001 15:16:27.503414 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d6746\" (UniqueName: \"kubernetes.io/projected/e6a47fc5-70fe-4006-a6a2-bca9667eec47-kube-api-access-d6746\") pod \"0e56acba034a4fe5e21c87878f7cabfd0ead2befdef111b141ca11c71a867s4\" (UID: \"e6a47fc5-70fe-4006-a6a2-bca9667eec47\") " pod="openstack-operators/0e56acba034a4fe5e21c87878f7cabfd0ead2befdef111b141ca11c71a867s4" Oct 01 15:16:27 crc kubenswrapper[4869]: I1001 15:16:27.503463 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e6a47fc5-70fe-4006-a6a2-bca9667eec47-util\") pod \"0e56acba034a4fe5e21c87878f7cabfd0ead2befdef111b141ca11c71a867s4\" (UID: \"e6a47fc5-70fe-4006-a6a2-bca9667eec47\") " pod="openstack-operators/0e56acba034a4fe5e21c87878f7cabfd0ead2befdef111b141ca11c71a867s4" Oct 01 15:16:27 crc kubenswrapper[4869]: I1001 15:16:27.503518 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e6a47fc5-70fe-4006-a6a2-bca9667eec47-bundle\") pod \"0e56acba034a4fe5e21c87878f7cabfd0ead2befdef111b141ca11c71a867s4\" (UID: \"e6a47fc5-70fe-4006-a6a2-bca9667eec47\") " pod="openstack-operators/0e56acba034a4fe5e21c87878f7cabfd0ead2befdef111b141ca11c71a867s4" Oct 01 15:16:27 crc kubenswrapper[4869]: I1001 15:16:27.605302 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e6a47fc5-70fe-4006-a6a2-bca9667eec47-bundle\") pod \"0e56acba034a4fe5e21c87878f7cabfd0ead2befdef111b141ca11c71a867s4\" (UID: \"e6a47fc5-70fe-4006-a6a2-bca9667eec47\") " pod="openstack-operators/0e56acba034a4fe5e21c87878f7cabfd0ead2befdef111b141ca11c71a867s4" Oct 01 15:16:27 crc kubenswrapper[4869]: I1001 
15:16:27.605410 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d6746\" (UniqueName: \"kubernetes.io/projected/e6a47fc5-70fe-4006-a6a2-bca9667eec47-kube-api-access-d6746\") pod \"0e56acba034a4fe5e21c87878f7cabfd0ead2befdef111b141ca11c71a867s4\" (UID: \"e6a47fc5-70fe-4006-a6a2-bca9667eec47\") " pod="openstack-operators/0e56acba034a4fe5e21c87878f7cabfd0ead2befdef111b141ca11c71a867s4" Oct 01 15:16:27 crc kubenswrapper[4869]: I1001 15:16:27.605445 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e6a47fc5-70fe-4006-a6a2-bca9667eec47-util\") pod \"0e56acba034a4fe5e21c87878f7cabfd0ead2befdef111b141ca11c71a867s4\" (UID: \"e6a47fc5-70fe-4006-a6a2-bca9667eec47\") " pod="openstack-operators/0e56acba034a4fe5e21c87878f7cabfd0ead2befdef111b141ca11c71a867s4" Oct 01 15:16:27 crc kubenswrapper[4869]: I1001 15:16:27.605987 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e6a47fc5-70fe-4006-a6a2-bca9667eec47-util\") pod \"0e56acba034a4fe5e21c87878f7cabfd0ead2befdef111b141ca11c71a867s4\" (UID: \"e6a47fc5-70fe-4006-a6a2-bca9667eec47\") " pod="openstack-operators/0e56acba034a4fe5e21c87878f7cabfd0ead2befdef111b141ca11c71a867s4" Oct 01 15:16:27 crc kubenswrapper[4869]: I1001 15:16:27.607090 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e6a47fc5-70fe-4006-a6a2-bca9667eec47-bundle\") pod \"0e56acba034a4fe5e21c87878f7cabfd0ead2befdef111b141ca11c71a867s4\" (UID: \"e6a47fc5-70fe-4006-a6a2-bca9667eec47\") " pod="openstack-operators/0e56acba034a4fe5e21c87878f7cabfd0ead2befdef111b141ca11c71a867s4" Oct 01 15:16:27 crc kubenswrapper[4869]: I1001 15:16:27.634347 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d6746\" (UniqueName: \"kubernetes.io/projected/e6a47fc5-70fe-4006-a6a2-bca9667eec47-kube-api-access-d6746\") pod \"0e56acba034a4fe5e21c87878f7cabfd0ead2befdef111b141ca11c71a867s4\" (UID: \"e6a47fc5-70fe-4006-a6a2-bca9667eec47\") " pod="openstack-operators/0e56acba034a4fe5e21c87878f7cabfd0ead2befdef111b141ca11c71a867s4" Oct 01 15:16:27 crc kubenswrapper[4869]: I1001 15:16:27.671970 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/0e56acba034a4fe5e21c87878f7cabfd0ead2befdef111b141ca11c71a867s4" Oct 01 15:16:27 crc kubenswrapper[4869]: I1001 15:16:27.954104 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/0e56acba034a4fe5e21c87878f7cabfd0ead2befdef111b141ca11c71a867s4"] Oct 01 15:16:28 crc kubenswrapper[4869]: I1001 15:16:28.459822 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-5lh7f" Oct 01 15:16:28 crc kubenswrapper[4869]: I1001 15:16:28.460345 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-5lh7f" Oct 01 15:16:28 crc kubenswrapper[4869]: I1001 15:16:28.537737 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-5lh7f" Oct 01 15:16:28 crc kubenswrapper[4869]: I1001 15:16:28.860546 4869 generic.go:334] "Generic (PLEG): container finished" podID="e6a47fc5-70fe-4006-a6a2-bca9667eec47" containerID="c30ed3d85fe54865ae08e5f334f0963c3973efd00443f7fdf2dc01e79f59545c" exitCode=0 Oct 01 15:16:28 crc kubenswrapper[4869]: I1001 15:16:28.860640 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/0e56acba034a4fe5e21c87878f7cabfd0ead2befdef111b141ca11c71a867s4" event={"ID":"e6a47fc5-70fe-4006-a6a2-bca9667eec47","Type":"ContainerDied","Data":"c30ed3d85fe54865ae08e5f334f0963c3973efd00443f7fdf2dc01e79f59545c"} Oct 01 15:16:28 crc kubenswrapper[4869]: I1001 15:16:28.860712 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/0e56acba034a4fe5e21c87878f7cabfd0ead2befdef111b141ca11c71a867s4" event={"ID":"e6a47fc5-70fe-4006-a6a2-bca9667eec47","Type":"ContainerStarted","Data":"0d7b781bd43ce98d52edbc825413716358fe7f43715b4a9ac680ebc518a81bcf"} Oct 01 15:16:28 crc kubenswrapper[4869]: I1001 15:16:28.914282 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-5lh7f" Oct 01 15:16:30 crc kubenswrapper[4869]: I1001 15:16:30.879363 4869 generic.go:334] "Generic (PLEG): container finished" podID="e6a47fc5-70fe-4006-a6a2-bca9667eec47" containerID="be5bcab26763f1a3c0f141b457cdf175051f26fe0b933e03abf685568a7cc05d" exitCode=0 Oct 01 15:16:30 crc kubenswrapper[4869]: I1001 15:16:30.879431 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/0e56acba034a4fe5e21c87878f7cabfd0ead2befdef111b141ca11c71a867s4" event={"ID":"e6a47fc5-70fe-4006-a6a2-bca9667eec47","Type":"ContainerDied","Data":"be5bcab26763f1a3c0f141b457cdf175051f26fe0b933e03abf685568a7cc05d"} Oct 01 15:16:31 crc kubenswrapper[4869]: I1001 15:16:31.903225 4869 generic.go:334] "Generic (PLEG): container finished" podID="e6a47fc5-70fe-4006-a6a2-bca9667eec47" containerID="74f80861e5d34356a10479f74b0016e143a08b25ab05588ce7aa896940c74ce0" exitCode=0 Oct 01 15:16:31 crc kubenswrapper[4869]: I1001 15:16:31.903322 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/0e56acba034a4fe5e21c87878f7cabfd0ead2befdef111b141ca11c71a867s4" event={"ID":"e6a47fc5-70fe-4006-a6a2-bca9667eec47","Type":"ContainerDied","Data":"74f80861e5d34356a10479f74b0016e143a08b25ab05588ce7aa896940c74ce0"} Oct 01 15:16:32 crc kubenswrapper[4869]: I1001 15:16:32.926168 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-5lh7f"] Oct 01 15:16:32 crc kubenswrapper[4869]: I1001 15:16:32.938561 4869 kuberuntime_container.go:808] 
"Killing container with a grace period" pod="openshift-marketplace/certified-operators-5lh7f" podUID="4e36ed2d-92da-4de9-94bb-43d810ded2c9" containerName="registry-server" containerID="cri-o://c474bbfc684b2bb23c62fd3df9a960b17e02fe588be03ef6a555da9a6c439c41" gracePeriod=2 Oct 01 15:16:33 crc kubenswrapper[4869]: I1001 15:16:33.225968 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/0e56acba034a4fe5e21c87878f7cabfd0ead2befdef111b141ca11c71a867s4" Oct 01 15:16:33 crc kubenswrapper[4869]: I1001 15:16:33.294647 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e6a47fc5-70fe-4006-a6a2-bca9667eec47-bundle\") pod \"e6a47fc5-70fe-4006-a6a2-bca9667eec47\" (UID: \"e6a47fc5-70fe-4006-a6a2-bca9667eec47\") " Oct 01 15:16:33 crc kubenswrapper[4869]: I1001 15:16:33.294753 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6746\" (UniqueName: \"kubernetes.io/projected/e6a47fc5-70fe-4006-a6a2-bca9667eec47-kube-api-access-d6746\") pod \"e6a47fc5-70fe-4006-a6a2-bca9667eec47\" (UID: \"e6a47fc5-70fe-4006-a6a2-bca9667eec47\") " Oct 01 15:16:33 crc kubenswrapper[4869]: I1001 15:16:33.294809 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e6a47fc5-70fe-4006-a6a2-bca9667eec47-util\") pod \"e6a47fc5-70fe-4006-a6a2-bca9667eec47\" (UID: \"e6a47fc5-70fe-4006-a6a2-bca9667eec47\") " Oct 01 15:16:33 crc kubenswrapper[4869]: I1001 15:16:33.295614 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e6a47fc5-70fe-4006-a6a2-bca9667eec47-bundle" (OuterVolumeSpecName: "bundle") pod "e6a47fc5-70fe-4006-a6a2-bca9667eec47" (UID: "e6a47fc5-70fe-4006-a6a2-bca9667eec47"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 15:16:33 crc kubenswrapper[4869]: I1001 15:16:33.307355 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e6a47fc5-70fe-4006-a6a2-bca9667eec47-util" (OuterVolumeSpecName: "util") pod "e6a47fc5-70fe-4006-a6a2-bca9667eec47" (UID: "e6a47fc5-70fe-4006-a6a2-bca9667eec47"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 15:16:33 crc kubenswrapper[4869]: I1001 15:16:33.309530 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e6a47fc5-70fe-4006-a6a2-bca9667eec47-kube-api-access-d6746" (OuterVolumeSpecName: "kube-api-access-d6746") pod "e6a47fc5-70fe-4006-a6a2-bca9667eec47" (UID: "e6a47fc5-70fe-4006-a6a2-bca9667eec47"). InnerVolumeSpecName "kube-api-access-d6746". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:16:33 crc kubenswrapper[4869]: I1001 15:16:33.362415 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-5lh7f" Oct 01 15:16:33 crc kubenswrapper[4869]: I1001 15:16:33.396442 4869 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e6a47fc5-70fe-4006-a6a2-bca9667eec47-util\") on node \"crc\" DevicePath \"\"" Oct 01 15:16:33 crc kubenswrapper[4869]: I1001 15:16:33.396482 4869 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e6a47fc5-70fe-4006-a6a2-bca9667eec47-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 15:16:33 crc kubenswrapper[4869]: I1001 15:16:33.396495 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6746\" (UniqueName: \"kubernetes.io/projected/e6a47fc5-70fe-4006-a6a2-bca9667eec47-kube-api-access-d6746\") on node \"crc\" DevicePath \"\"" Oct 01 15:16:33 crc kubenswrapper[4869]: I1001 15:16:33.497859 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4e36ed2d-92da-4de9-94bb-43d810ded2c9-utilities\") pod \"4e36ed2d-92da-4de9-94bb-43d810ded2c9\" (UID: \"4e36ed2d-92da-4de9-94bb-43d810ded2c9\") " Oct 01 15:16:33 crc kubenswrapper[4869]: I1001 15:16:33.497943 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4e36ed2d-92da-4de9-94bb-43d810ded2c9-catalog-content\") pod \"4e36ed2d-92da-4de9-94bb-43d810ded2c9\" (UID: \"4e36ed2d-92da-4de9-94bb-43d810ded2c9\") " Oct 01 15:16:33 crc kubenswrapper[4869]: I1001 15:16:33.498039 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ttbrv\" (UniqueName: \"kubernetes.io/projected/4e36ed2d-92da-4de9-94bb-43d810ded2c9-kube-api-access-ttbrv\") pod \"4e36ed2d-92da-4de9-94bb-43d810ded2c9\" (UID: \"4e36ed2d-92da-4de9-94bb-43d810ded2c9\") " Oct 01 15:16:33 crc kubenswrapper[4869]: I1001 15:16:33.498916 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4e36ed2d-92da-4de9-94bb-43d810ded2c9-utilities" (OuterVolumeSpecName: "utilities") pod "4e36ed2d-92da-4de9-94bb-43d810ded2c9" (UID: "4e36ed2d-92da-4de9-94bb-43d810ded2c9"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 15:16:33 crc kubenswrapper[4869]: I1001 15:16:33.500872 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4e36ed2d-92da-4de9-94bb-43d810ded2c9-kube-api-access-ttbrv" (OuterVolumeSpecName: "kube-api-access-ttbrv") pod "4e36ed2d-92da-4de9-94bb-43d810ded2c9" (UID: "4e36ed2d-92da-4de9-94bb-43d810ded2c9"). InnerVolumeSpecName "kube-api-access-ttbrv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:16:33 crc kubenswrapper[4869]: I1001 15:16:33.551805 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4e36ed2d-92da-4de9-94bb-43d810ded2c9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4e36ed2d-92da-4de9-94bb-43d810ded2c9" (UID: "4e36ed2d-92da-4de9-94bb-43d810ded2c9"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 15:16:33 crc kubenswrapper[4869]: I1001 15:16:33.600142 4869 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4e36ed2d-92da-4de9-94bb-43d810ded2c9-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 15:16:33 crc kubenswrapper[4869]: I1001 15:16:33.600208 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ttbrv\" (UniqueName: \"kubernetes.io/projected/4e36ed2d-92da-4de9-94bb-43d810ded2c9-kube-api-access-ttbrv\") on node \"crc\" DevicePath \"\"" Oct 01 15:16:33 crc kubenswrapper[4869]: I1001 15:16:33.600295 4869 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4e36ed2d-92da-4de9-94bb-43d810ded2c9-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 15:16:33 crc kubenswrapper[4869]: I1001 15:16:33.924744 4869 generic.go:334] "Generic (PLEG): container finished" podID="4e36ed2d-92da-4de9-94bb-43d810ded2c9" containerID="c474bbfc684b2bb23c62fd3df9a960b17e02fe588be03ef6a555da9a6c439c41" exitCode=0 Oct 01 15:16:33 crc kubenswrapper[4869]: I1001 15:16:33.924860 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5lh7f" Oct 01 15:16:33 crc kubenswrapper[4869]: I1001 15:16:33.924879 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5lh7f" event={"ID":"4e36ed2d-92da-4de9-94bb-43d810ded2c9","Type":"ContainerDied","Data":"c474bbfc684b2bb23c62fd3df9a960b17e02fe588be03ef6a555da9a6c439c41"} Oct 01 15:16:33 crc kubenswrapper[4869]: I1001 15:16:33.924944 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5lh7f" event={"ID":"4e36ed2d-92da-4de9-94bb-43d810ded2c9","Type":"ContainerDied","Data":"f0089822774a5b8fc3cb2c9a125949b71be367605a063d7bcbfa91ba1de52105"} Oct 01 15:16:33 crc kubenswrapper[4869]: I1001 15:16:33.924976 4869 scope.go:117] "RemoveContainer" containerID="c474bbfc684b2bb23c62fd3df9a960b17e02fe588be03ef6a555da9a6c439c41" Oct 01 15:16:33 crc kubenswrapper[4869]: I1001 15:16:33.928897 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/0e56acba034a4fe5e21c87878f7cabfd0ead2befdef111b141ca11c71a867s4" event={"ID":"e6a47fc5-70fe-4006-a6a2-bca9667eec47","Type":"ContainerDied","Data":"0d7b781bd43ce98d52edbc825413716358fe7f43715b4a9ac680ebc518a81bcf"} Oct 01 15:16:33 crc kubenswrapper[4869]: I1001 15:16:33.928968 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0d7b781bd43ce98d52edbc825413716358fe7f43715b4a9ac680ebc518a81bcf" Oct 01 15:16:33 crc kubenswrapper[4869]: I1001 15:16:33.929076 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/0e56acba034a4fe5e21c87878f7cabfd0ead2befdef111b141ca11c71a867s4" Oct 01 15:16:33 crc kubenswrapper[4869]: I1001 15:16:33.942764 4869 scope.go:117] "RemoveContainer" containerID="159e2a75b3a2ea71ccd5830d9a099fa720e9ee902027d656206c080cec30adf3" Oct 01 15:16:33 crc kubenswrapper[4869]: I1001 15:16:33.948881 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-5lh7f"] Oct 01 15:16:33 crc kubenswrapper[4869]: I1001 15:16:33.962123 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-5lh7f"] Oct 01 15:16:33 crc kubenswrapper[4869]: I1001 15:16:33.963533 4869 scope.go:117] "RemoveContainer" containerID="99b763c346005a96efa040397d177d2c3f3e897050d9e97c0714f9e52e6e87ac" Oct 01 15:16:33 crc kubenswrapper[4869]: I1001 15:16:33.984317 4869 scope.go:117] "RemoveContainer" containerID="c474bbfc684b2bb23c62fd3df9a960b17e02fe588be03ef6a555da9a6c439c41" Oct 01 15:16:33 crc kubenswrapper[4869]: E1001 15:16:33.984863 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c474bbfc684b2bb23c62fd3df9a960b17e02fe588be03ef6a555da9a6c439c41\": container with ID starting with c474bbfc684b2bb23c62fd3df9a960b17e02fe588be03ef6a555da9a6c439c41 not found: ID does not exist" containerID="c474bbfc684b2bb23c62fd3df9a960b17e02fe588be03ef6a555da9a6c439c41" Oct 01 15:16:33 crc kubenswrapper[4869]: I1001 15:16:33.984941 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c474bbfc684b2bb23c62fd3df9a960b17e02fe588be03ef6a555da9a6c439c41"} err="failed to get container status \"c474bbfc684b2bb23c62fd3df9a960b17e02fe588be03ef6a555da9a6c439c41\": rpc error: code = NotFound desc = could not find container \"c474bbfc684b2bb23c62fd3df9a960b17e02fe588be03ef6a555da9a6c439c41\": container with ID starting with c474bbfc684b2bb23c62fd3df9a960b17e02fe588be03ef6a555da9a6c439c41 not found: ID does not exist" Oct 01 15:16:33 crc kubenswrapper[4869]: I1001 15:16:33.984994 4869 scope.go:117] "RemoveContainer" containerID="159e2a75b3a2ea71ccd5830d9a099fa720e9ee902027d656206c080cec30adf3" Oct 01 15:16:33 crc kubenswrapper[4869]: E1001 15:16:33.985461 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"159e2a75b3a2ea71ccd5830d9a099fa720e9ee902027d656206c080cec30adf3\": container with ID starting with 159e2a75b3a2ea71ccd5830d9a099fa720e9ee902027d656206c080cec30adf3 not found: ID does not exist" containerID="159e2a75b3a2ea71ccd5830d9a099fa720e9ee902027d656206c080cec30adf3" Oct 01 15:16:33 crc kubenswrapper[4869]: I1001 15:16:33.985508 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"159e2a75b3a2ea71ccd5830d9a099fa720e9ee902027d656206c080cec30adf3"} err="failed to get container status \"159e2a75b3a2ea71ccd5830d9a099fa720e9ee902027d656206c080cec30adf3\": rpc error: code = NotFound desc = could not find container \"159e2a75b3a2ea71ccd5830d9a099fa720e9ee902027d656206c080cec30adf3\": container with ID starting with 159e2a75b3a2ea71ccd5830d9a099fa720e9ee902027d656206c080cec30adf3 not found: ID does not exist" Oct 01 15:16:33 crc kubenswrapper[4869]: I1001 15:16:33.985548 4869 scope.go:117] "RemoveContainer" containerID="99b763c346005a96efa040397d177d2c3f3e897050d9e97c0714f9e52e6e87ac" Oct 01 15:16:33 crc kubenswrapper[4869]: E1001 15:16:33.986206 4869 log.go:32] 
"ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"99b763c346005a96efa040397d177d2c3f3e897050d9e97c0714f9e52e6e87ac\": container with ID starting with 99b763c346005a96efa040397d177d2c3f3e897050d9e97c0714f9e52e6e87ac not found: ID does not exist" containerID="99b763c346005a96efa040397d177d2c3f3e897050d9e97c0714f9e52e6e87ac" Oct 01 15:16:33 crc kubenswrapper[4869]: I1001 15:16:33.986371 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"99b763c346005a96efa040397d177d2c3f3e897050d9e97c0714f9e52e6e87ac"} err="failed to get container status \"99b763c346005a96efa040397d177d2c3f3e897050d9e97c0714f9e52e6e87ac\": rpc error: code = NotFound desc = could not find container \"99b763c346005a96efa040397d177d2c3f3e897050d9e97c0714f9e52e6e87ac\": container with ID starting with 99b763c346005a96efa040397d177d2c3f3e897050d9e97c0714f9e52e6e87ac not found: ID does not exist" Oct 01 15:16:35 crc kubenswrapper[4869]: I1001 15:16:35.596732 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4e36ed2d-92da-4de9-94bb-43d810ded2c9" path="/var/lib/kubelet/pods/4e36ed2d-92da-4de9-94bb-43d810ded2c9/volumes" Oct 01 15:16:39 crc kubenswrapper[4869]: I1001 15:16:39.343718 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-operator-676c66f88b-8r9wr"] Oct 01 15:16:39 crc kubenswrapper[4869]: E1001 15:16:39.344192 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e36ed2d-92da-4de9-94bb-43d810ded2c9" containerName="extract-content" Oct 01 15:16:39 crc kubenswrapper[4869]: I1001 15:16:39.344206 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e36ed2d-92da-4de9-94bb-43d810ded2c9" containerName="extract-content" Oct 01 15:16:39 crc kubenswrapper[4869]: E1001 15:16:39.344216 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e36ed2d-92da-4de9-94bb-43d810ded2c9" containerName="registry-server" Oct 01 15:16:39 crc kubenswrapper[4869]: I1001 15:16:39.344224 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e36ed2d-92da-4de9-94bb-43d810ded2c9" containerName="registry-server" Oct 01 15:16:39 crc kubenswrapper[4869]: E1001 15:16:39.344236 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e36ed2d-92da-4de9-94bb-43d810ded2c9" containerName="extract-utilities" Oct 01 15:16:39 crc kubenswrapper[4869]: I1001 15:16:39.344245 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e36ed2d-92da-4de9-94bb-43d810ded2c9" containerName="extract-utilities" Oct 01 15:16:39 crc kubenswrapper[4869]: E1001 15:16:39.344277 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e6a47fc5-70fe-4006-a6a2-bca9667eec47" containerName="util" Oct 01 15:16:39 crc kubenswrapper[4869]: I1001 15:16:39.344285 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="e6a47fc5-70fe-4006-a6a2-bca9667eec47" containerName="util" Oct 01 15:16:39 crc kubenswrapper[4869]: E1001 15:16:39.344295 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e6a47fc5-70fe-4006-a6a2-bca9667eec47" containerName="pull" Oct 01 15:16:39 crc kubenswrapper[4869]: I1001 15:16:39.344303 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="e6a47fc5-70fe-4006-a6a2-bca9667eec47" containerName="pull" Oct 01 15:16:39 crc kubenswrapper[4869]: E1001 15:16:39.344318 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e6a47fc5-70fe-4006-a6a2-bca9667eec47" 
containerName="extract" Oct 01 15:16:39 crc kubenswrapper[4869]: I1001 15:16:39.344325 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="e6a47fc5-70fe-4006-a6a2-bca9667eec47" containerName="extract" Oct 01 15:16:39 crc kubenswrapper[4869]: I1001 15:16:39.344455 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="e6a47fc5-70fe-4006-a6a2-bca9667eec47" containerName="extract" Oct 01 15:16:39 crc kubenswrapper[4869]: I1001 15:16:39.344467 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e36ed2d-92da-4de9-94bb-43d810ded2c9" containerName="registry-server" Oct 01 15:16:39 crc kubenswrapper[4869]: I1001 15:16:39.345155 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-676c66f88b-8r9wr" Oct 01 15:16:39 crc kubenswrapper[4869]: I1001 15:16:39.347719 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-operator-dockercfg-n8lg5" Oct 01 15:16:39 crc kubenswrapper[4869]: I1001 15:16:39.388386 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-676c66f88b-8r9wr"] Oct 01 15:16:39 crc kubenswrapper[4869]: I1001 15:16:39.482591 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fcxw4\" (UniqueName: \"kubernetes.io/projected/c0f1c013-9664-4846-8576-8a9a26c64dad-kube-api-access-fcxw4\") pod \"openstack-operator-controller-operator-676c66f88b-8r9wr\" (UID: \"c0f1c013-9664-4846-8576-8a9a26c64dad\") " pod="openstack-operators/openstack-operator-controller-operator-676c66f88b-8r9wr" Oct 01 15:16:39 crc kubenswrapper[4869]: I1001 15:16:39.584115 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fcxw4\" (UniqueName: \"kubernetes.io/projected/c0f1c013-9664-4846-8576-8a9a26c64dad-kube-api-access-fcxw4\") pod \"openstack-operator-controller-operator-676c66f88b-8r9wr\" (UID: \"c0f1c013-9664-4846-8576-8a9a26c64dad\") " pod="openstack-operators/openstack-operator-controller-operator-676c66f88b-8r9wr" Oct 01 15:16:39 crc kubenswrapper[4869]: I1001 15:16:39.624293 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fcxw4\" (UniqueName: \"kubernetes.io/projected/c0f1c013-9664-4846-8576-8a9a26c64dad-kube-api-access-fcxw4\") pod \"openstack-operator-controller-operator-676c66f88b-8r9wr\" (UID: \"c0f1c013-9664-4846-8576-8a9a26c64dad\") " pod="openstack-operators/openstack-operator-controller-operator-676c66f88b-8r9wr" Oct 01 15:16:39 crc kubenswrapper[4869]: I1001 15:16:39.668369 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-676c66f88b-8r9wr" Oct 01 15:16:40 crc kubenswrapper[4869]: I1001 15:16:40.188085 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-676c66f88b-8r9wr"] Oct 01 15:16:40 crc kubenswrapper[4869]: I1001 15:16:40.980509 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-676c66f88b-8r9wr" event={"ID":"c0f1c013-9664-4846-8576-8a9a26c64dad","Type":"ContainerStarted","Data":"75d18cebc5fc55fa6678aa4a2e427a29310e97a211d329ed8f5c78d0fa0da8e3"} Oct 01 15:16:43 crc kubenswrapper[4869]: I1001 15:16:43.354907 4869 patch_prober.go:28] interesting pod/machine-config-daemon-c86m8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 15:16:43 crc kubenswrapper[4869]: I1001 15:16:43.355326 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 15:16:45 crc kubenswrapper[4869]: I1001 15:16:45.013251 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-676c66f88b-8r9wr" event={"ID":"c0f1c013-9664-4846-8576-8a9a26c64dad","Type":"ContainerStarted","Data":"a5d46c0d49426f951873a559d933817635be5f5aab8c5e38d1894f6e0f69a661"} Oct 01 15:16:47 crc kubenswrapper[4869]: I1001 15:16:47.026562 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-676c66f88b-8r9wr" event={"ID":"c0f1c013-9664-4846-8576-8a9a26c64dad","Type":"ContainerStarted","Data":"08443d0df1fb1c49c0a76691ad8df71c97795e8a18eb345aef12035fac6fb39f"} Oct 01 15:16:47 crc kubenswrapper[4869]: I1001 15:16:47.026951 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-operator-676c66f88b-8r9wr" Oct 01 15:16:47 crc kubenswrapper[4869]: I1001 15:16:47.076403 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-operator-676c66f88b-8r9wr" podStartSLOduration=1.411380225 podStartE2EDuration="8.076381562s" podCreationTimestamp="2025-10-01 15:16:39 +0000 UTC" firstStartedPulling="2025-10-01 15:16:40.193450261 +0000 UTC m=+709.340293387" lastFinishedPulling="2025-10-01 15:16:46.858451608 +0000 UTC m=+716.005294724" observedRunningTime="2025-10-01 15:16:47.071295144 +0000 UTC m=+716.218138290" watchObservedRunningTime="2025-10-01 15:16:47.076381562 +0000 UTC m=+716.223224698" Oct 01 15:16:49 crc kubenswrapper[4869]: I1001 15:16:49.670954 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-operator-676c66f88b-8r9wr" Oct 01 15:16:52 crc kubenswrapper[4869]: I1001 15:16:52.328162 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-5pmns"] Oct 01 15:16:52 crc kubenswrapper[4869]: I1001 15:16:52.329836 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-5pmns" Oct 01 15:16:52 crc kubenswrapper[4869]: I1001 15:16:52.345508 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-5pmns"] Oct 01 15:16:52 crc kubenswrapper[4869]: I1001 15:16:52.466364 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rkrzf\" (UniqueName: \"kubernetes.io/projected/8ff59bb9-995b-4688-8cb1-9b4b6dd1f9af-kube-api-access-rkrzf\") pod \"community-operators-5pmns\" (UID: \"8ff59bb9-995b-4688-8cb1-9b4b6dd1f9af\") " pod="openshift-marketplace/community-operators-5pmns" Oct 01 15:16:52 crc kubenswrapper[4869]: I1001 15:16:52.466452 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8ff59bb9-995b-4688-8cb1-9b4b6dd1f9af-utilities\") pod \"community-operators-5pmns\" (UID: \"8ff59bb9-995b-4688-8cb1-9b4b6dd1f9af\") " pod="openshift-marketplace/community-operators-5pmns" Oct 01 15:16:52 crc kubenswrapper[4869]: I1001 15:16:52.466480 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8ff59bb9-995b-4688-8cb1-9b4b6dd1f9af-catalog-content\") pod \"community-operators-5pmns\" (UID: \"8ff59bb9-995b-4688-8cb1-9b4b6dd1f9af\") " pod="openshift-marketplace/community-operators-5pmns" Oct 01 15:16:52 crc kubenswrapper[4869]: I1001 15:16:52.567353 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8ff59bb9-995b-4688-8cb1-9b4b6dd1f9af-utilities\") pod \"community-operators-5pmns\" (UID: \"8ff59bb9-995b-4688-8cb1-9b4b6dd1f9af\") " pod="openshift-marketplace/community-operators-5pmns" Oct 01 15:16:52 crc kubenswrapper[4869]: I1001 15:16:52.567414 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8ff59bb9-995b-4688-8cb1-9b4b6dd1f9af-catalog-content\") pod \"community-operators-5pmns\" (UID: \"8ff59bb9-995b-4688-8cb1-9b4b6dd1f9af\") " pod="openshift-marketplace/community-operators-5pmns" Oct 01 15:16:52 crc kubenswrapper[4869]: I1001 15:16:52.567464 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rkrzf\" (UniqueName: \"kubernetes.io/projected/8ff59bb9-995b-4688-8cb1-9b4b6dd1f9af-kube-api-access-rkrzf\") pod \"community-operators-5pmns\" (UID: \"8ff59bb9-995b-4688-8cb1-9b4b6dd1f9af\") " pod="openshift-marketplace/community-operators-5pmns" Oct 01 15:16:52 crc kubenswrapper[4869]: I1001 15:16:52.568023 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8ff59bb9-995b-4688-8cb1-9b4b6dd1f9af-utilities\") pod \"community-operators-5pmns\" (UID: \"8ff59bb9-995b-4688-8cb1-9b4b6dd1f9af\") " pod="openshift-marketplace/community-operators-5pmns" Oct 01 15:16:52 crc kubenswrapper[4869]: I1001 15:16:52.568034 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8ff59bb9-995b-4688-8cb1-9b4b6dd1f9af-catalog-content\") pod \"community-operators-5pmns\" (UID: \"8ff59bb9-995b-4688-8cb1-9b4b6dd1f9af\") " pod="openshift-marketplace/community-operators-5pmns" Oct 01 15:16:52 crc kubenswrapper[4869]: I1001 15:16:52.599213 4869 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-rkrzf\" (UniqueName: \"kubernetes.io/projected/8ff59bb9-995b-4688-8cb1-9b4b6dd1f9af-kube-api-access-rkrzf\") pod \"community-operators-5pmns\" (UID: \"8ff59bb9-995b-4688-8cb1-9b4b6dd1f9af\") " pod="openshift-marketplace/community-operators-5pmns" Oct 01 15:16:52 crc kubenswrapper[4869]: I1001 15:16:52.644234 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-5pmns" Oct 01 15:16:52 crc kubenswrapper[4869]: I1001 15:16:52.946081 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-5pmns"] Oct 01 15:16:53 crc kubenswrapper[4869]: I1001 15:16:53.069325 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5pmns" event={"ID":"8ff59bb9-995b-4688-8cb1-9b4b6dd1f9af","Type":"ContainerStarted","Data":"5fbb77575e6c1baee1b3df516bfe59f57302f2125bb9b00842b3038f982ef1dc"} Oct 01 15:16:54 crc kubenswrapper[4869]: I1001 15:16:54.081356 4869 generic.go:334] "Generic (PLEG): container finished" podID="8ff59bb9-995b-4688-8cb1-9b4b6dd1f9af" containerID="96cc395c19c2c44ff83e5c026c23d0502c0f0569e24c7ff57eb2a0e4c7ab0119" exitCode=0 Oct 01 15:16:54 crc kubenswrapper[4869]: I1001 15:16:54.081421 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5pmns" event={"ID":"8ff59bb9-995b-4688-8cb1-9b4b6dd1f9af","Type":"ContainerDied","Data":"96cc395c19c2c44ff83e5c026c23d0502c0f0569e24c7ff57eb2a0e4c7ab0119"} Oct 01 15:16:55 crc kubenswrapper[4869]: I1001 15:16:55.087696 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5pmns" event={"ID":"8ff59bb9-995b-4688-8cb1-9b4b6dd1f9af","Type":"ContainerStarted","Data":"eb7ebf586615537d7e2793cc039f6ac0ff69f4e467da5cb898d948f04b6a8ed0"} Oct 01 15:16:56 crc kubenswrapper[4869]: I1001 15:16:56.099001 4869 generic.go:334] "Generic (PLEG): container finished" podID="8ff59bb9-995b-4688-8cb1-9b4b6dd1f9af" containerID="eb7ebf586615537d7e2793cc039f6ac0ff69f4e467da5cb898d948f04b6a8ed0" exitCode=0 Oct 01 15:16:56 crc kubenswrapper[4869]: I1001 15:16:56.099048 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5pmns" event={"ID":"8ff59bb9-995b-4688-8cb1-9b4b6dd1f9af","Type":"ContainerDied","Data":"eb7ebf586615537d7e2793cc039f6ac0ff69f4e467da5cb898d948f04b6a8ed0"} Oct 01 15:16:57 crc kubenswrapper[4869]: I1001 15:16:57.107494 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5pmns" event={"ID":"8ff59bb9-995b-4688-8cb1-9b4b6dd1f9af","Type":"ContainerStarted","Data":"3b1af0a0fda857016f8a0a21b02c15be652670d9d5ea6b4a9fcd167d39af7af9"} Oct 01 15:16:57 crc kubenswrapper[4869]: I1001 15:16:57.126236 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-5pmns" podStartSLOduration=2.653634261 podStartE2EDuration="5.12621425s" podCreationTimestamp="2025-10-01 15:16:52 +0000 UTC" firstStartedPulling="2025-10-01 15:16:54.083940292 +0000 UTC m=+723.230783448" lastFinishedPulling="2025-10-01 15:16:56.556520311 +0000 UTC m=+725.703363437" observedRunningTime="2025-10-01 15:16:57.124969499 +0000 UTC m=+726.271812645" watchObservedRunningTime="2025-10-01 15:16:57.12621425 +0000 UTC m=+726.273057396" Oct 01 15:16:59 crc kubenswrapper[4869]: I1001 15:16:59.931205 4869 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-marketplace/redhat-marketplace-g948z"] Oct 01 15:16:59 crc kubenswrapper[4869]: I1001 15:16:59.932861 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-g948z" Oct 01 15:16:59 crc kubenswrapper[4869]: I1001 15:16:59.951195 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-g948z"] Oct 01 15:16:59 crc kubenswrapper[4869]: I1001 15:16:59.977380 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2b186174-6ddb-49f4-8b00-e7ca815d1fa4-utilities\") pod \"redhat-marketplace-g948z\" (UID: \"2b186174-6ddb-49f4-8b00-e7ca815d1fa4\") " pod="openshift-marketplace/redhat-marketplace-g948z" Oct 01 15:16:59 crc kubenswrapper[4869]: I1001 15:16:59.977434 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2b186174-6ddb-49f4-8b00-e7ca815d1fa4-catalog-content\") pod \"redhat-marketplace-g948z\" (UID: \"2b186174-6ddb-49f4-8b00-e7ca815d1fa4\") " pod="openshift-marketplace/redhat-marketplace-g948z" Oct 01 15:16:59 crc kubenswrapper[4869]: I1001 15:16:59.977465 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xff9l\" (UniqueName: \"kubernetes.io/projected/2b186174-6ddb-49f4-8b00-e7ca815d1fa4-kube-api-access-xff9l\") pod \"redhat-marketplace-g948z\" (UID: \"2b186174-6ddb-49f4-8b00-e7ca815d1fa4\") " pod="openshift-marketplace/redhat-marketplace-g948z" Oct 01 15:17:00 crc kubenswrapper[4869]: I1001 15:17:00.078503 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2b186174-6ddb-49f4-8b00-e7ca815d1fa4-catalog-content\") pod \"redhat-marketplace-g948z\" (UID: \"2b186174-6ddb-49f4-8b00-e7ca815d1fa4\") " pod="openshift-marketplace/redhat-marketplace-g948z" Oct 01 15:17:00 crc kubenswrapper[4869]: I1001 15:17:00.078900 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xff9l\" (UniqueName: \"kubernetes.io/projected/2b186174-6ddb-49f4-8b00-e7ca815d1fa4-kube-api-access-xff9l\") pod \"redhat-marketplace-g948z\" (UID: \"2b186174-6ddb-49f4-8b00-e7ca815d1fa4\") " pod="openshift-marketplace/redhat-marketplace-g948z" Oct 01 15:17:00 crc kubenswrapper[4869]: I1001 15:17:00.079211 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2b186174-6ddb-49f4-8b00-e7ca815d1fa4-catalog-content\") pod \"redhat-marketplace-g948z\" (UID: \"2b186174-6ddb-49f4-8b00-e7ca815d1fa4\") " pod="openshift-marketplace/redhat-marketplace-g948z" Oct 01 15:17:00 crc kubenswrapper[4869]: I1001 15:17:00.079442 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2b186174-6ddb-49f4-8b00-e7ca815d1fa4-utilities\") pod \"redhat-marketplace-g948z\" (UID: \"2b186174-6ddb-49f4-8b00-e7ca815d1fa4\") " pod="openshift-marketplace/redhat-marketplace-g948z" Oct 01 15:17:00 crc kubenswrapper[4869]: I1001 15:17:00.079854 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2b186174-6ddb-49f4-8b00-e7ca815d1fa4-utilities\") pod \"redhat-marketplace-g948z\" (UID: \"2b186174-6ddb-49f4-8b00-e7ca815d1fa4\") 
" pod="openshift-marketplace/redhat-marketplace-g948z" Oct 01 15:17:00 crc kubenswrapper[4869]: I1001 15:17:00.115555 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xff9l\" (UniqueName: \"kubernetes.io/projected/2b186174-6ddb-49f4-8b00-e7ca815d1fa4-kube-api-access-xff9l\") pod \"redhat-marketplace-g948z\" (UID: \"2b186174-6ddb-49f4-8b00-e7ca815d1fa4\") " pod="openshift-marketplace/redhat-marketplace-g948z" Oct 01 15:17:00 crc kubenswrapper[4869]: I1001 15:17:00.254819 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-g948z" Oct 01 15:17:00 crc kubenswrapper[4869]: I1001 15:17:00.688556 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-g948z"] Oct 01 15:17:01 crc kubenswrapper[4869]: I1001 15:17:01.137430 4869 generic.go:334] "Generic (PLEG): container finished" podID="2b186174-6ddb-49f4-8b00-e7ca815d1fa4" containerID="897e89cd6af26b16192ca04d3c467c60ef74e02d346e8e05fc752944e3147032" exitCode=0 Oct 01 15:17:01 crc kubenswrapper[4869]: I1001 15:17:01.137583 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g948z" event={"ID":"2b186174-6ddb-49f4-8b00-e7ca815d1fa4","Type":"ContainerDied","Data":"897e89cd6af26b16192ca04d3c467c60ef74e02d346e8e05fc752944e3147032"} Oct 01 15:17:01 crc kubenswrapper[4869]: I1001 15:17:01.137730 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g948z" event={"ID":"2b186174-6ddb-49f4-8b00-e7ca815d1fa4","Type":"ContainerStarted","Data":"d619d5a771e2764be9a143b1e8620f60805421512ca523b7eb601c6c3e6313b8"} Oct 01 15:17:02 crc kubenswrapper[4869]: I1001 15:17:02.645475 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-5pmns" Oct 01 15:17:02 crc kubenswrapper[4869]: I1001 15:17:02.645804 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-5pmns" Oct 01 15:17:02 crc kubenswrapper[4869]: I1001 15:17:02.720328 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-5pmns" Oct 01 15:17:03 crc kubenswrapper[4869]: I1001 15:17:03.155141 4869 generic.go:334] "Generic (PLEG): container finished" podID="2b186174-6ddb-49f4-8b00-e7ca815d1fa4" containerID="68b4e4ea7500e2af38957583b980621124f07ef5f4edf1f1af4aacdea7473205" exitCode=0 Oct 01 15:17:03 crc kubenswrapper[4869]: I1001 15:17:03.155214 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g948z" event={"ID":"2b186174-6ddb-49f4-8b00-e7ca815d1fa4","Type":"ContainerDied","Data":"68b4e4ea7500e2af38957583b980621124f07ef5f4edf1f1af4aacdea7473205"} Oct 01 15:17:03 crc kubenswrapper[4869]: I1001 15:17:03.235255 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-5pmns" Oct 01 15:17:04 crc kubenswrapper[4869]: I1001 15:17:04.177240 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g948z" event={"ID":"2b186174-6ddb-49f4-8b00-e7ca815d1fa4","Type":"ContainerStarted","Data":"f1e0771d95495784d28102d4036f024ca472c98d0785e3f4e02d0da6d60f90c7"} Oct 01 15:17:04 crc kubenswrapper[4869]: I1001 15:17:04.921982 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-marketplace/redhat-marketplace-g948z" podStartSLOduration=3.355969048 podStartE2EDuration="5.921961657s" podCreationTimestamp="2025-10-01 15:16:59 +0000 UTC" firstStartedPulling="2025-10-01 15:17:01.139003721 +0000 UTC m=+730.285846837" lastFinishedPulling="2025-10-01 15:17:03.70499629 +0000 UTC m=+732.851839446" observedRunningTime="2025-10-01 15:17:04.205540292 +0000 UTC m=+733.352383418" watchObservedRunningTime="2025-10-01 15:17:04.921961657 +0000 UTC m=+734.068804783" Oct 01 15:17:04 crc kubenswrapper[4869]: I1001 15:17:04.922154 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-5pmns"] Oct 01 15:17:05 crc kubenswrapper[4869]: I1001 15:17:05.184988 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-5pmns" podUID="8ff59bb9-995b-4688-8cb1-9b4b6dd1f9af" containerName="registry-server" containerID="cri-o://3b1af0a0fda857016f8a0a21b02c15be652670d9d5ea6b4a9fcd167d39af7af9" gracePeriod=2 Oct 01 15:17:05 crc kubenswrapper[4869]: I1001 15:17:05.685392 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-5pmns" Oct 01 15:17:05 crc kubenswrapper[4869]: I1001 15:17:05.778644 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8ff59bb9-995b-4688-8cb1-9b4b6dd1f9af-catalog-content\") pod \"8ff59bb9-995b-4688-8cb1-9b4b6dd1f9af\" (UID: \"8ff59bb9-995b-4688-8cb1-9b4b6dd1f9af\") " Oct 01 15:17:05 crc kubenswrapper[4869]: I1001 15:17:05.778724 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rkrzf\" (UniqueName: \"kubernetes.io/projected/8ff59bb9-995b-4688-8cb1-9b4b6dd1f9af-kube-api-access-rkrzf\") pod \"8ff59bb9-995b-4688-8cb1-9b4b6dd1f9af\" (UID: \"8ff59bb9-995b-4688-8cb1-9b4b6dd1f9af\") " Oct 01 15:17:05 crc kubenswrapper[4869]: I1001 15:17:05.778789 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8ff59bb9-995b-4688-8cb1-9b4b6dd1f9af-utilities\") pod \"8ff59bb9-995b-4688-8cb1-9b4b6dd1f9af\" (UID: \"8ff59bb9-995b-4688-8cb1-9b4b6dd1f9af\") " Oct 01 15:17:05 crc kubenswrapper[4869]: I1001 15:17:05.779500 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8ff59bb9-995b-4688-8cb1-9b4b6dd1f9af-utilities" (OuterVolumeSpecName: "utilities") pod "8ff59bb9-995b-4688-8cb1-9b4b6dd1f9af" (UID: "8ff59bb9-995b-4688-8cb1-9b4b6dd1f9af"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 15:17:05 crc kubenswrapper[4869]: I1001 15:17:05.784194 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8ff59bb9-995b-4688-8cb1-9b4b6dd1f9af-kube-api-access-rkrzf" (OuterVolumeSpecName: "kube-api-access-rkrzf") pod "8ff59bb9-995b-4688-8cb1-9b4b6dd1f9af" (UID: "8ff59bb9-995b-4688-8cb1-9b4b6dd1f9af"). InnerVolumeSpecName "kube-api-access-rkrzf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:17:05 crc kubenswrapper[4869]: I1001 15:17:05.880019 4869 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8ff59bb9-995b-4688-8cb1-9b4b6dd1f9af-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 15:17:05 crc kubenswrapper[4869]: I1001 15:17:05.880229 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rkrzf\" (UniqueName: \"kubernetes.io/projected/8ff59bb9-995b-4688-8cb1-9b4b6dd1f9af-kube-api-access-rkrzf\") on node \"crc\" DevicePath \"\"" Oct 01 15:17:06 crc kubenswrapper[4869]: I1001 15:17:06.200399 4869 generic.go:334] "Generic (PLEG): container finished" podID="8ff59bb9-995b-4688-8cb1-9b4b6dd1f9af" containerID="3b1af0a0fda857016f8a0a21b02c15be652670d9d5ea6b4a9fcd167d39af7af9" exitCode=0 Oct 01 15:17:06 crc kubenswrapper[4869]: I1001 15:17:06.200491 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5pmns" event={"ID":"8ff59bb9-995b-4688-8cb1-9b4b6dd1f9af","Type":"ContainerDied","Data":"3b1af0a0fda857016f8a0a21b02c15be652670d9d5ea6b4a9fcd167d39af7af9"} Oct 01 15:17:06 crc kubenswrapper[4869]: I1001 15:17:06.200535 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-5pmns" Oct 01 15:17:06 crc kubenswrapper[4869]: I1001 15:17:06.200554 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5pmns" event={"ID":"8ff59bb9-995b-4688-8cb1-9b4b6dd1f9af","Type":"ContainerDied","Data":"5fbb77575e6c1baee1b3df516bfe59f57302f2125bb9b00842b3038f982ef1dc"} Oct 01 15:17:06 crc kubenswrapper[4869]: I1001 15:17:06.200596 4869 scope.go:117] "RemoveContainer" containerID="3b1af0a0fda857016f8a0a21b02c15be652670d9d5ea6b4a9fcd167d39af7af9" Oct 01 15:17:06 crc kubenswrapper[4869]: I1001 15:17:06.224483 4869 scope.go:117] "RemoveContainer" containerID="eb7ebf586615537d7e2793cc039f6ac0ff69f4e467da5cb898d948f04b6a8ed0" Oct 01 15:17:06 crc kubenswrapper[4869]: I1001 15:17:06.247763 4869 scope.go:117] "RemoveContainer" containerID="96cc395c19c2c44ff83e5c026c23d0502c0f0569e24c7ff57eb2a0e4c7ab0119" Oct 01 15:17:06 crc kubenswrapper[4869]: I1001 15:17:06.278806 4869 scope.go:117] "RemoveContainer" containerID="3b1af0a0fda857016f8a0a21b02c15be652670d9d5ea6b4a9fcd167d39af7af9" Oct 01 15:17:06 crc kubenswrapper[4869]: E1001 15:17:06.279246 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3b1af0a0fda857016f8a0a21b02c15be652670d9d5ea6b4a9fcd167d39af7af9\": container with ID starting with 3b1af0a0fda857016f8a0a21b02c15be652670d9d5ea6b4a9fcd167d39af7af9 not found: ID does not exist" containerID="3b1af0a0fda857016f8a0a21b02c15be652670d9d5ea6b4a9fcd167d39af7af9" Oct 01 15:17:06 crc kubenswrapper[4869]: I1001 15:17:06.279302 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3b1af0a0fda857016f8a0a21b02c15be652670d9d5ea6b4a9fcd167d39af7af9"} err="failed to get container status \"3b1af0a0fda857016f8a0a21b02c15be652670d9d5ea6b4a9fcd167d39af7af9\": rpc error: code = NotFound desc = could not find container \"3b1af0a0fda857016f8a0a21b02c15be652670d9d5ea6b4a9fcd167d39af7af9\": container with ID starting with 3b1af0a0fda857016f8a0a21b02c15be652670d9d5ea6b4a9fcd167d39af7af9 not found: ID does not exist" Oct 01 15:17:06 crc kubenswrapper[4869]: I1001 15:17:06.279329 4869 scope.go:117] 
"RemoveContainer" containerID="eb7ebf586615537d7e2793cc039f6ac0ff69f4e467da5cb898d948f04b6a8ed0" Oct 01 15:17:06 crc kubenswrapper[4869]: E1001 15:17:06.279861 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eb7ebf586615537d7e2793cc039f6ac0ff69f4e467da5cb898d948f04b6a8ed0\": container with ID starting with eb7ebf586615537d7e2793cc039f6ac0ff69f4e467da5cb898d948f04b6a8ed0 not found: ID does not exist" containerID="eb7ebf586615537d7e2793cc039f6ac0ff69f4e467da5cb898d948f04b6a8ed0" Oct 01 15:17:06 crc kubenswrapper[4869]: I1001 15:17:06.279944 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eb7ebf586615537d7e2793cc039f6ac0ff69f4e467da5cb898d948f04b6a8ed0"} err="failed to get container status \"eb7ebf586615537d7e2793cc039f6ac0ff69f4e467da5cb898d948f04b6a8ed0\": rpc error: code = NotFound desc = could not find container \"eb7ebf586615537d7e2793cc039f6ac0ff69f4e467da5cb898d948f04b6a8ed0\": container with ID starting with eb7ebf586615537d7e2793cc039f6ac0ff69f4e467da5cb898d948f04b6a8ed0 not found: ID does not exist" Oct 01 15:17:06 crc kubenswrapper[4869]: I1001 15:17:06.280014 4869 scope.go:117] "RemoveContainer" containerID="96cc395c19c2c44ff83e5c026c23d0502c0f0569e24c7ff57eb2a0e4c7ab0119" Oct 01 15:17:06 crc kubenswrapper[4869]: E1001 15:17:06.280354 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"96cc395c19c2c44ff83e5c026c23d0502c0f0569e24c7ff57eb2a0e4c7ab0119\": container with ID starting with 96cc395c19c2c44ff83e5c026c23d0502c0f0569e24c7ff57eb2a0e4c7ab0119 not found: ID does not exist" containerID="96cc395c19c2c44ff83e5c026c23d0502c0f0569e24c7ff57eb2a0e4c7ab0119" Oct 01 15:17:06 crc kubenswrapper[4869]: I1001 15:17:06.280408 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"96cc395c19c2c44ff83e5c026c23d0502c0f0569e24c7ff57eb2a0e4c7ab0119"} err="failed to get container status \"96cc395c19c2c44ff83e5c026c23d0502c0f0569e24c7ff57eb2a0e4c7ab0119\": rpc error: code = NotFound desc = could not find container \"96cc395c19c2c44ff83e5c026c23d0502c0f0569e24c7ff57eb2a0e4c7ab0119\": container with ID starting with 96cc395c19c2c44ff83e5c026c23d0502c0f0569e24c7ff57eb2a0e4c7ab0119 not found: ID does not exist" Oct 01 15:17:06 crc kubenswrapper[4869]: I1001 15:17:06.624286 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8ff59bb9-995b-4688-8cb1-9b4b6dd1f9af-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8ff59bb9-995b-4688-8cb1-9b4b6dd1f9af" (UID: "8ff59bb9-995b-4688-8cb1-9b4b6dd1f9af"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 15:17:06 crc kubenswrapper[4869]: I1001 15:17:06.693594 4869 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8ff59bb9-995b-4688-8cb1-9b4b6dd1f9af-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 15:17:06 crc kubenswrapper[4869]: I1001 15:17:06.827463 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-5pmns"] Oct 01 15:17:06 crc kubenswrapper[4869]: I1001 15:17:06.830608 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-5pmns"] Oct 01 15:17:07 crc kubenswrapper[4869]: I1001 15:17:07.594681 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8ff59bb9-995b-4688-8cb1-9b4b6dd1f9af" path="/var/lib/kubelet/pods/8ff59bb9-995b-4688-8cb1-9b4b6dd1f9af/volumes" Oct 01 15:17:10 crc kubenswrapper[4869]: I1001 15:17:10.255069 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-g948z" Oct 01 15:17:10 crc kubenswrapper[4869]: I1001 15:17:10.255132 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-g948z" Oct 01 15:17:10 crc kubenswrapper[4869]: I1001 15:17:10.313190 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-g948z" Oct 01 15:17:11 crc kubenswrapper[4869]: I1001 15:17:11.319514 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-g948z" Oct 01 15:17:11 crc kubenswrapper[4869]: I1001 15:17:11.392941 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-g948z"] Oct 01 15:17:13 crc kubenswrapper[4869]: I1001 15:17:13.262965 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-g948z" podUID="2b186174-6ddb-49f4-8b00-e7ca815d1fa4" containerName="registry-server" containerID="cri-o://f1e0771d95495784d28102d4036f024ca472c98d0785e3f4e02d0da6d60f90c7" gracePeriod=2 Oct 01 15:17:13 crc kubenswrapper[4869]: I1001 15:17:13.353988 4869 patch_prober.go:28] interesting pod/machine-config-daemon-c86m8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 15:17:13 crc kubenswrapper[4869]: I1001 15:17:13.354055 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 15:17:13 crc kubenswrapper[4869]: I1001 15:17:13.354104 4869 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" Oct 01 15:17:13 crc kubenswrapper[4869]: I1001 15:17:13.354740 4869 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"aedd256e8a7e9adcb3428c1dfb846efe5c6adc26f622dd82be7a88d857fb712b"} pod="openshift-machine-config-operator/machine-config-daemon-c86m8" containerMessage="Container 
machine-config-daemon failed liveness probe, will be restarted" Oct 01 15:17:13 crc kubenswrapper[4869]: I1001 15:17:13.354810 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" containerID="cri-o://aedd256e8a7e9adcb3428c1dfb846efe5c6adc26f622dd82be7a88d857fb712b" gracePeriod=600 Oct 01 15:17:13 crc kubenswrapper[4869]: I1001 15:17:13.683811 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-g948z" Oct 01 15:17:13 crc kubenswrapper[4869]: I1001 15:17:13.797623 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2b186174-6ddb-49f4-8b00-e7ca815d1fa4-catalog-content\") pod \"2b186174-6ddb-49f4-8b00-e7ca815d1fa4\" (UID: \"2b186174-6ddb-49f4-8b00-e7ca815d1fa4\") " Oct 01 15:17:13 crc kubenswrapper[4869]: I1001 15:17:13.797694 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2b186174-6ddb-49f4-8b00-e7ca815d1fa4-utilities\") pod \"2b186174-6ddb-49f4-8b00-e7ca815d1fa4\" (UID: \"2b186174-6ddb-49f4-8b00-e7ca815d1fa4\") " Oct 01 15:17:13 crc kubenswrapper[4869]: I1001 15:17:13.797739 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xff9l\" (UniqueName: \"kubernetes.io/projected/2b186174-6ddb-49f4-8b00-e7ca815d1fa4-kube-api-access-xff9l\") pod \"2b186174-6ddb-49f4-8b00-e7ca815d1fa4\" (UID: \"2b186174-6ddb-49f4-8b00-e7ca815d1fa4\") " Oct 01 15:17:13 crc kubenswrapper[4869]: I1001 15:17:13.798777 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2b186174-6ddb-49f4-8b00-e7ca815d1fa4-utilities" (OuterVolumeSpecName: "utilities") pod "2b186174-6ddb-49f4-8b00-e7ca815d1fa4" (UID: "2b186174-6ddb-49f4-8b00-e7ca815d1fa4"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 15:17:13 crc kubenswrapper[4869]: I1001 15:17:13.802543 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2b186174-6ddb-49f4-8b00-e7ca815d1fa4-kube-api-access-xff9l" (OuterVolumeSpecName: "kube-api-access-xff9l") pod "2b186174-6ddb-49f4-8b00-e7ca815d1fa4" (UID: "2b186174-6ddb-49f4-8b00-e7ca815d1fa4"). InnerVolumeSpecName "kube-api-access-xff9l". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:17:13 crc kubenswrapper[4869]: I1001 15:17:13.839121 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2b186174-6ddb-49f4-8b00-e7ca815d1fa4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2b186174-6ddb-49f4-8b00-e7ca815d1fa4" (UID: "2b186174-6ddb-49f4-8b00-e7ca815d1fa4"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 15:17:13 crc kubenswrapper[4869]: I1001 15:17:13.899805 4869 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2b186174-6ddb-49f4-8b00-e7ca815d1fa4-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 15:17:13 crc kubenswrapper[4869]: I1001 15:17:13.899862 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xff9l\" (UniqueName: \"kubernetes.io/projected/2b186174-6ddb-49f4-8b00-e7ca815d1fa4-kube-api-access-xff9l\") on node \"crc\" DevicePath \"\"" Oct 01 15:17:13 crc kubenswrapper[4869]: I1001 15:17:13.899882 4869 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2b186174-6ddb-49f4-8b00-e7ca815d1fa4-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 15:17:14 crc kubenswrapper[4869]: I1001 15:17:14.273052 4869 generic.go:334] "Generic (PLEG): container finished" podID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerID="aedd256e8a7e9adcb3428c1dfb846efe5c6adc26f622dd82be7a88d857fb712b" exitCode=0 Oct 01 15:17:14 crc kubenswrapper[4869]: I1001 15:17:14.273149 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" event={"ID":"a4b64f7f-0b03-4f47-965b-9fde048b735c","Type":"ContainerDied","Data":"aedd256e8a7e9adcb3428c1dfb846efe5c6adc26f622dd82be7a88d857fb712b"} Oct 01 15:17:14 crc kubenswrapper[4869]: I1001 15:17:14.273225 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" event={"ID":"a4b64f7f-0b03-4f47-965b-9fde048b735c","Type":"ContainerStarted","Data":"4be1a24ad49d8ad1b9a1395c62f6541610f0ea2bbd6f24d661f794435b423dc8"} Oct 01 15:17:14 crc kubenswrapper[4869]: I1001 15:17:14.273283 4869 scope.go:117] "RemoveContainer" containerID="d389e7566576d3e629333a96c6509eba4ec6eb584120d194f797cef044db86f8" Oct 01 15:17:14 crc kubenswrapper[4869]: I1001 15:17:14.276084 4869 generic.go:334] "Generic (PLEG): container finished" podID="2b186174-6ddb-49f4-8b00-e7ca815d1fa4" containerID="f1e0771d95495784d28102d4036f024ca472c98d0785e3f4e02d0da6d60f90c7" exitCode=0 Oct 01 15:17:14 crc kubenswrapper[4869]: I1001 15:17:14.276126 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g948z" event={"ID":"2b186174-6ddb-49f4-8b00-e7ca815d1fa4","Type":"ContainerDied","Data":"f1e0771d95495784d28102d4036f024ca472c98d0785e3f4e02d0da6d60f90c7"} Oct 01 15:17:14 crc kubenswrapper[4869]: I1001 15:17:14.276160 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g948z" event={"ID":"2b186174-6ddb-49f4-8b00-e7ca815d1fa4","Type":"ContainerDied","Data":"d619d5a771e2764be9a143b1e8620f60805421512ca523b7eb601c6c3e6313b8"} Oct 01 15:17:14 crc kubenswrapper[4869]: I1001 15:17:14.276215 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-g948z" Oct 01 15:17:14 crc kubenswrapper[4869]: I1001 15:17:14.316093 4869 scope.go:117] "RemoveContainer" containerID="f1e0771d95495784d28102d4036f024ca472c98d0785e3f4e02d0da6d60f90c7" Oct 01 15:17:14 crc kubenswrapper[4869]: I1001 15:17:14.326615 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-g948z"] Oct 01 15:17:14 crc kubenswrapper[4869]: I1001 15:17:14.332072 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-g948z"] Oct 01 15:17:14 crc kubenswrapper[4869]: I1001 15:17:14.335137 4869 scope.go:117] "RemoveContainer" containerID="68b4e4ea7500e2af38957583b980621124f07ef5f4edf1f1af4aacdea7473205" Oct 01 15:17:14 crc kubenswrapper[4869]: I1001 15:17:14.351771 4869 scope.go:117] "RemoveContainer" containerID="897e89cd6af26b16192ca04d3c467c60ef74e02d346e8e05fc752944e3147032" Oct 01 15:17:14 crc kubenswrapper[4869]: I1001 15:17:14.367454 4869 scope.go:117] "RemoveContainer" containerID="f1e0771d95495784d28102d4036f024ca472c98d0785e3f4e02d0da6d60f90c7" Oct 01 15:17:14 crc kubenswrapper[4869]: E1001 15:17:14.367885 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f1e0771d95495784d28102d4036f024ca472c98d0785e3f4e02d0da6d60f90c7\": container with ID starting with f1e0771d95495784d28102d4036f024ca472c98d0785e3f4e02d0da6d60f90c7 not found: ID does not exist" containerID="f1e0771d95495784d28102d4036f024ca472c98d0785e3f4e02d0da6d60f90c7" Oct 01 15:17:14 crc kubenswrapper[4869]: I1001 15:17:14.367921 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f1e0771d95495784d28102d4036f024ca472c98d0785e3f4e02d0da6d60f90c7"} err="failed to get container status \"f1e0771d95495784d28102d4036f024ca472c98d0785e3f4e02d0da6d60f90c7\": rpc error: code = NotFound desc = could not find container \"f1e0771d95495784d28102d4036f024ca472c98d0785e3f4e02d0da6d60f90c7\": container with ID starting with f1e0771d95495784d28102d4036f024ca472c98d0785e3f4e02d0da6d60f90c7 not found: ID does not exist" Oct 01 15:17:14 crc kubenswrapper[4869]: I1001 15:17:14.367946 4869 scope.go:117] "RemoveContainer" containerID="68b4e4ea7500e2af38957583b980621124f07ef5f4edf1f1af4aacdea7473205" Oct 01 15:17:14 crc kubenswrapper[4869]: E1001 15:17:14.368213 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"68b4e4ea7500e2af38957583b980621124f07ef5f4edf1f1af4aacdea7473205\": container with ID starting with 68b4e4ea7500e2af38957583b980621124f07ef5f4edf1f1af4aacdea7473205 not found: ID does not exist" containerID="68b4e4ea7500e2af38957583b980621124f07ef5f4edf1f1af4aacdea7473205" Oct 01 15:17:14 crc kubenswrapper[4869]: I1001 15:17:14.368251 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"68b4e4ea7500e2af38957583b980621124f07ef5f4edf1f1af4aacdea7473205"} err="failed to get container status \"68b4e4ea7500e2af38957583b980621124f07ef5f4edf1f1af4aacdea7473205\": rpc error: code = NotFound desc = could not find container \"68b4e4ea7500e2af38957583b980621124f07ef5f4edf1f1af4aacdea7473205\": container with ID starting with 68b4e4ea7500e2af38957583b980621124f07ef5f4edf1f1af4aacdea7473205 not found: ID does not exist" Oct 01 15:17:14 crc kubenswrapper[4869]: I1001 15:17:14.368331 4869 scope.go:117] "RemoveContainer" 
containerID="897e89cd6af26b16192ca04d3c467c60ef74e02d346e8e05fc752944e3147032" Oct 01 15:17:14 crc kubenswrapper[4869]: E1001 15:17:14.368699 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"897e89cd6af26b16192ca04d3c467c60ef74e02d346e8e05fc752944e3147032\": container with ID starting with 897e89cd6af26b16192ca04d3c467c60ef74e02d346e8e05fc752944e3147032 not found: ID does not exist" containerID="897e89cd6af26b16192ca04d3c467c60ef74e02d346e8e05fc752944e3147032" Oct 01 15:17:14 crc kubenswrapper[4869]: I1001 15:17:14.368721 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"897e89cd6af26b16192ca04d3c467c60ef74e02d346e8e05fc752944e3147032"} err="failed to get container status \"897e89cd6af26b16192ca04d3c467c60ef74e02d346e8e05fc752944e3147032\": rpc error: code = NotFound desc = could not find container \"897e89cd6af26b16192ca04d3c467c60ef74e02d346e8e05fc752944e3147032\": container with ID starting with 897e89cd6af26b16192ca04d3c467c60ef74e02d346e8e05fc752944e3147032 not found: ID does not exist" Oct 01 15:17:15 crc kubenswrapper[4869]: I1001 15:17:15.587874 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2b186174-6ddb-49f4-8b00-e7ca815d1fa4" path="/var/lib/kubelet/pods/2b186174-6ddb-49f4-8b00-e7ca815d1fa4/volumes" Oct 01 15:17:25 crc kubenswrapper[4869]: I1001 15:17:25.996110 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-controller-manager-f7f98cb69-79zfb"] Oct 01 15:17:25 crc kubenswrapper[4869]: E1001 15:17:25.997012 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2b186174-6ddb-49f4-8b00-e7ca815d1fa4" containerName="extract-utilities" Oct 01 15:17:25 crc kubenswrapper[4869]: I1001 15:17:25.997031 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="2b186174-6ddb-49f4-8b00-e7ca815d1fa4" containerName="extract-utilities" Oct 01 15:17:25 crc kubenswrapper[4869]: E1001 15:17:25.997045 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ff59bb9-995b-4688-8cb1-9b4b6dd1f9af" containerName="registry-server" Oct 01 15:17:25 crc kubenswrapper[4869]: I1001 15:17:25.997053 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ff59bb9-995b-4688-8cb1-9b4b6dd1f9af" containerName="registry-server" Oct 01 15:17:25 crc kubenswrapper[4869]: E1001 15:17:25.997083 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ff59bb9-995b-4688-8cb1-9b4b6dd1f9af" containerName="extract-utilities" Oct 01 15:17:25 crc kubenswrapper[4869]: I1001 15:17:25.997092 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ff59bb9-995b-4688-8cb1-9b4b6dd1f9af" containerName="extract-utilities" Oct 01 15:17:25 crc kubenswrapper[4869]: E1001 15:17:25.997103 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2b186174-6ddb-49f4-8b00-e7ca815d1fa4" containerName="extract-content" Oct 01 15:17:25 crc kubenswrapper[4869]: I1001 15:17:25.997110 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="2b186174-6ddb-49f4-8b00-e7ca815d1fa4" containerName="extract-content" Oct 01 15:17:25 crc kubenswrapper[4869]: E1001 15:17:25.997131 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ff59bb9-995b-4688-8cb1-9b4b6dd1f9af" containerName="extract-content" Oct 01 15:17:25 crc kubenswrapper[4869]: I1001 15:17:25.997139 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ff59bb9-995b-4688-8cb1-9b4b6dd1f9af" 
containerName="extract-content" Oct 01 15:17:25 crc kubenswrapper[4869]: E1001 15:17:25.997148 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2b186174-6ddb-49f4-8b00-e7ca815d1fa4" containerName="registry-server" Oct 01 15:17:25 crc kubenswrapper[4869]: I1001 15:17:25.997156 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="2b186174-6ddb-49f4-8b00-e7ca815d1fa4" containerName="registry-server" Oct 01 15:17:25 crc kubenswrapper[4869]: I1001 15:17:25.997312 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="8ff59bb9-995b-4688-8cb1-9b4b6dd1f9af" containerName="registry-server" Oct 01 15:17:25 crc kubenswrapper[4869]: I1001 15:17:25.997331 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="2b186174-6ddb-49f4-8b00-e7ca815d1fa4" containerName="registry-server" Oct 01 15:17:25 crc kubenswrapper[4869]: I1001 15:17:25.998008 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-f7f98cb69-79zfb" Oct 01 15:17:25 crc kubenswrapper[4869]: I1001 15:17:25.999556 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/cinder-operator-controller-manager-859cd486d-s8dwj"] Oct 01 15:17:25 crc kubenswrapper[4869]: I1001 15:17:25.999854 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-tzvgh" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.000281 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-859cd486d-s8dwj" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.003631 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-bbb52" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.005485 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-f7f98cb69-79zfb"] Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.026747 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-77fb7bcf5b-7pl4z"] Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.027963 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-77fb7bcf5b-7pl4z" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.029684 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-xqdg8" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.031242 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-859cd486d-s8dwj"] Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.033237 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-8bc4775b5-wmx8b"] Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.034123 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-8bc4775b5-wmx8b" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.035639 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-cxpsf" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.046023 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-77fb7bcf5b-7pl4z"] Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.078776 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-8bc4775b5-wmx8b"] Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.091326 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/heat-operator-controller-manager-5b4fc86755-g27k4"] Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.092561 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5b4fc86755-g27k4" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.096651 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-7bxh5" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.110966 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5b4fc86755-g27k4"] Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.139313 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-679b4759bb-mrrm6"] Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.140276 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-679b4759bb-mrrm6" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.145878 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-m9wsc" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.152843 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-679b4759bb-mrrm6"] Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.176883 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2ddfs\" (UniqueName: \"kubernetes.io/projected/3e866523-b046-49e3-88f5-1c657a204a14-kube-api-access-2ddfs\") pod \"cinder-operator-controller-manager-859cd486d-s8dwj\" (UID: \"3e866523-b046-49e3-88f5-1c657a204a14\") " pod="openstack-operators/cinder-operator-controller-manager-859cd486d-s8dwj" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.177113 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4stjp\" (UniqueName: \"kubernetes.io/projected/62fc6158-1408-44bd-891b-ef7ead1f5867-kube-api-access-4stjp\") pod \"barbican-operator-controller-manager-f7f98cb69-79zfb\" (UID: \"62fc6158-1408-44bd-891b-ef7ead1f5867\") " pod="openstack-operators/barbican-operator-controller-manager-f7f98cb69-79zfb" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.177213 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rkjg4\" (UniqueName: \"kubernetes.io/projected/c7cb96bc-1269-4c0c-b3f3-1575ee10543e-kube-api-access-rkjg4\") pod \"designate-operator-controller-manager-77fb7bcf5b-7pl4z\" (UID: \"c7cb96bc-1269-4c0c-b3f3-1575ee10543e\") " pod="openstack-operators/designate-operator-controller-manager-77fb7bcf5b-7pl4z" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.177312 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f5m5s\" (UniqueName: \"kubernetes.io/projected/ee8b5119-5b8d-494f-8864-8f0cf2a10631-kube-api-access-f5m5s\") pod \"glance-operator-controller-manager-8bc4775b5-wmx8b\" (UID: \"ee8b5119-5b8d-494f-8864-8f0cf2a10631\") " pod="openstack-operators/glance-operator-controller-manager-8bc4775b5-wmx8b" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.189789 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-5c8fdc4d5c-pd6wq"] Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.194523 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-5c8fdc4d5c-pd6wq" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.201513 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-g4flc" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.207891 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-59d7dc95cf-mqdq6"] Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.208490 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.209042 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-59d7dc95cf-mqdq6" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.212689 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-rvnbf" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.225389 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ironic-operator-controller-manager-5f45cd594f-mh4jm"] Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.226519 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-5f45cd594f-mh4jm" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.231853 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-294cs" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.232888 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-5c8fdc4d5c-pd6wq"] Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.242272 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-59d7dc95cf-mqdq6"] Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.249897 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-5f45cd594f-mh4jm"] Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.271537 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-b7cf8cb5f-6clgt"] Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.272432 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-b7cf8cb5f-6clgt" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.282043 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-67bf5bb885-fv6pk"] Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.282370 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-rl6vw" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.283072 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-67bf5bb885-fv6pk" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.284794 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2ddfs\" (UniqueName: \"kubernetes.io/projected/3e866523-b046-49e3-88f5-1c657a204a14-kube-api-access-2ddfs\") pod \"cinder-operator-controller-manager-859cd486d-s8dwj\" (UID: \"3e866523-b046-49e3-88f5-1c657a204a14\") " pod="openstack-operators/cinder-operator-controller-manager-859cd486d-s8dwj" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.284838 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ccda625c-cf12-415b-9a87-dd77a4c0fa1b-cert\") pod \"infra-operator-controller-manager-5c8fdc4d5c-pd6wq\" (UID: \"ccda625c-cf12-415b-9a87-dd77a4c0fa1b\") " pod="openstack-operators/infra-operator-controller-manager-5c8fdc4d5c-pd6wq" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.284860 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4stjp\" (UniqueName: \"kubernetes.io/projected/62fc6158-1408-44bd-891b-ef7ead1f5867-kube-api-access-4stjp\") pod \"barbican-operator-controller-manager-f7f98cb69-79zfb\" (UID: \"62fc6158-1408-44bd-891b-ef7ead1f5867\") " pod="openstack-operators/barbican-operator-controller-manager-f7f98cb69-79zfb" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.284882 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jcpt8\" (UniqueName: \"kubernetes.io/projected/a67caefc-004c-4cd3-92b1-191f9531044a-kube-api-access-jcpt8\") pod \"heat-operator-controller-manager-5b4fc86755-g27k4\" (UID: \"a67caefc-004c-4cd3-92b1-191f9531044a\") " pod="openstack-operators/heat-operator-controller-manager-5b4fc86755-g27k4" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.284912 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rkjg4\" (UniqueName: \"kubernetes.io/projected/c7cb96bc-1269-4c0c-b3f3-1575ee10543e-kube-api-access-rkjg4\") pod \"designate-operator-controller-manager-77fb7bcf5b-7pl4z\" (UID: \"c7cb96bc-1269-4c0c-b3f3-1575ee10543e\") " pod="openstack-operators/designate-operator-controller-manager-77fb7bcf5b-7pl4z" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.284927 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-549qw\" (UniqueName: \"kubernetes.io/projected/1f43a837-ea14-4bdb-9b91-ffbd20f1bad3-kube-api-access-549qw\") pod \"horizon-operator-controller-manager-679b4759bb-mrrm6\" (UID: \"1f43a837-ea14-4bdb-9b91-ffbd20f1bad3\") " pod="openstack-operators/horizon-operator-controller-manager-679b4759bb-mrrm6" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.284956 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f5m5s\" (UniqueName: \"kubernetes.io/projected/ee8b5119-5b8d-494f-8864-8f0cf2a10631-kube-api-access-f5m5s\") pod \"glance-operator-controller-manager-8bc4775b5-wmx8b\" (UID: \"ee8b5119-5b8d-494f-8864-8f0cf2a10631\") " pod="openstack-operators/glance-operator-controller-manager-8bc4775b5-wmx8b" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.284982 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-54czt\" (UniqueName: 
\"kubernetes.io/projected/ccda625c-cf12-415b-9a87-dd77a4c0fa1b-kube-api-access-54czt\") pod \"infra-operator-controller-manager-5c8fdc4d5c-pd6wq\" (UID: \"ccda625c-cf12-415b-9a87-dd77a4c0fa1b\") " pod="openstack-operators/infra-operator-controller-manager-5c8fdc4d5c-pd6wq" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.287241 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-k8zqg" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.291871 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-b7cf8cb5f-6clgt"] Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.298140 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-67bf5bb885-fv6pk"] Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.302899 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4stjp\" (UniqueName: \"kubernetes.io/projected/62fc6158-1408-44bd-891b-ef7ead1f5867-kube-api-access-4stjp\") pod \"barbican-operator-controller-manager-f7f98cb69-79zfb\" (UID: \"62fc6158-1408-44bd-891b-ef7ead1f5867\") " pod="openstack-operators/barbican-operator-controller-manager-f7f98cb69-79zfb" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.307811 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-54fbbfcd44-j6zn8"] Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.315903 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-54fbbfcd44-j6zn8" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.316970 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-f7f98cb69-79zfb" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.318070 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f5m5s\" (UniqueName: \"kubernetes.io/projected/ee8b5119-5b8d-494f-8864-8f0cf2a10631-kube-api-access-f5m5s\") pod \"glance-operator-controller-manager-8bc4775b5-wmx8b\" (UID: \"ee8b5119-5b8d-494f-8864-8f0cf2a10631\") " pod="openstack-operators/glance-operator-controller-manager-8bc4775b5-wmx8b" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.330053 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2ddfs\" (UniqueName: \"kubernetes.io/projected/3e866523-b046-49e3-88f5-1c657a204a14-kube-api-access-2ddfs\") pod \"cinder-operator-controller-manager-859cd486d-s8dwj\" (UID: \"3e866523-b046-49e3-88f5-1c657a204a14\") " pod="openstack-operators/cinder-operator-controller-manager-859cd486d-s8dwj" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.332470 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-54fbbfcd44-j6zn8"] Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.334407 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-f4dcf" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.340369 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-7fd5b6bbc6-5tcmh"] Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.341973 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-7fd5b6bbc6-5tcmh" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.388965 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-8bc4775b5-wmx8b" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.391560 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-54czt\" (UniqueName: \"kubernetes.io/projected/ccda625c-cf12-415b-9a87-dd77a4c0fa1b-kube-api-access-54czt\") pod \"infra-operator-controller-manager-5c8fdc4d5c-pd6wq\" (UID: \"ccda625c-cf12-415b-9a87-dd77a4c0fa1b\") " pod="openstack-operators/infra-operator-controller-manager-5c8fdc4d5c-pd6wq" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.391796 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4pjp5\" (UniqueName: \"kubernetes.io/projected/e1f0cffe-e44a-432b-bd66-03be980080b2-kube-api-access-4pjp5\") pod \"keystone-operator-controller-manager-59d7dc95cf-mqdq6\" (UID: \"e1f0cffe-e44a-432b-bd66-03be980080b2\") " pod="openstack-operators/keystone-operator-controller-manager-59d7dc95cf-mqdq6" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.391906 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rd4gv\" (UniqueName: \"kubernetes.io/projected/1ee62730-fdac-40bd-b923-d5544be938e1-kube-api-access-rd4gv\") pod \"mariadb-operator-controller-manager-67bf5bb885-fv6pk\" (UID: \"1ee62730-fdac-40bd-b923-d5544be938e1\") " pod="openstack-operators/mariadb-operator-controller-manager-67bf5bb885-fv6pk" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.391939 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gpbdr\" (UniqueName: \"kubernetes.io/projected/e51d646d-01fe-48e5-af48-29db1e16c849-kube-api-access-gpbdr\") pod \"manila-operator-controller-manager-b7cf8cb5f-6clgt\" (UID: \"e51d646d-01fe-48e5-af48-29db1e16c849\") " pod="openstack-operators/manila-operator-controller-manager-b7cf8cb5f-6clgt" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.391999 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nv2sq\" (UniqueName: \"kubernetes.io/projected/d7e4e3d7-4b52-46b2-8097-b00b4de3b87a-kube-api-access-nv2sq\") pod \"ironic-operator-controller-manager-5f45cd594f-mh4jm\" (UID: \"d7e4e3d7-4b52-46b2-8097-b00b4de3b87a\") " pod="openstack-operators/ironic-operator-controller-manager-5f45cd594f-mh4jm" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.392034 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ccda625c-cf12-415b-9a87-dd77a4c0fa1b-cert\") pod \"infra-operator-controller-manager-5c8fdc4d5c-pd6wq\" (UID: \"ccda625c-cf12-415b-9a87-dd77a4c0fa1b\") " pod="openstack-operators/infra-operator-controller-manager-5c8fdc4d5c-pd6wq" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.392079 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jcpt8\" (UniqueName: \"kubernetes.io/projected/a67caefc-004c-4cd3-92b1-191f9531044a-kube-api-access-jcpt8\") pod \"heat-operator-controller-manager-5b4fc86755-g27k4\" (UID: \"a67caefc-004c-4cd3-92b1-191f9531044a\") " pod="openstack-operators/heat-operator-controller-manager-5b4fc86755-g27k4" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.392139 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-549qw\" (UniqueName: 
\"kubernetes.io/projected/1f43a837-ea14-4bdb-9b91-ffbd20f1bad3-kube-api-access-549qw\") pod \"horizon-operator-controller-manager-679b4759bb-mrrm6\" (UID: \"1f43a837-ea14-4bdb-9b91-ffbd20f1bad3\") " pod="openstack-operators/horizon-operator-controller-manager-679b4759bb-mrrm6" Oct 01 15:17:26 crc kubenswrapper[4869]: E1001 15:17:26.393902 4869 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Oct 01 15:17:26 crc kubenswrapper[4869]: E1001 15:17:26.393971 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ccda625c-cf12-415b-9a87-dd77a4c0fa1b-cert podName:ccda625c-cf12-415b-9a87-dd77a4c0fa1b nodeName:}" failed. No retries permitted until 2025-10-01 15:17:26.893947672 +0000 UTC m=+756.040790788 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/ccda625c-cf12-415b-9a87-dd77a4c0fa1b-cert") pod "infra-operator-controller-manager-5c8fdc4d5c-pd6wq" (UID: "ccda625c-cf12-415b-9a87-dd77a4c0fa1b") : secret "infra-operator-webhook-server-cert" not found Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.397506 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rkjg4\" (UniqueName: \"kubernetes.io/projected/c7cb96bc-1269-4c0c-b3f3-1575ee10543e-kube-api-access-rkjg4\") pod \"designate-operator-controller-manager-77fb7bcf5b-7pl4z\" (UID: \"c7cb96bc-1269-4c0c-b3f3-1575ee10543e\") " pod="openstack-operators/designate-operator-controller-manager-77fb7bcf5b-7pl4z" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.405941 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-4db2j" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.444253 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-7fd5b6bbc6-5tcmh"] Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.462463 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-75f8d67d86-42l4j"] Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.471620 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jcpt8\" (UniqueName: \"kubernetes.io/projected/a67caefc-004c-4cd3-92b1-191f9531044a-kube-api-access-jcpt8\") pod \"heat-operator-controller-manager-5b4fc86755-g27k4\" (UID: \"a67caefc-004c-4cd3-92b1-191f9531044a\") " pod="openstack-operators/heat-operator-controller-manager-5b4fc86755-g27k4" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.474930 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-549qw\" (UniqueName: \"kubernetes.io/projected/1f43a837-ea14-4bdb-9b91-ffbd20f1bad3-kube-api-access-549qw\") pod \"horizon-operator-controller-manager-679b4759bb-mrrm6\" (UID: \"1f43a837-ea14-4bdb-9b91-ffbd20f1bad3\") " pod="openstack-operators/horizon-operator-controller-manager-679b4759bb-mrrm6" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.476358 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-54czt\" (UniqueName: \"kubernetes.io/projected/ccda625c-cf12-415b-9a87-dd77a4c0fa1b-kube-api-access-54czt\") pod \"infra-operator-controller-manager-5c8fdc4d5c-pd6wq\" (UID: \"ccda625c-cf12-415b-9a87-dd77a4c0fa1b\") " pod="openstack-operators/infra-operator-controller-manager-5c8fdc4d5c-pd6wq" Oct 01 
15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.477327 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-659bb84579nbl75"] Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.477449 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-75f8d67d86-42l4j" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.478708 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-659bb84579nbl75" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.481364 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-75f8d67d86-42l4j"] Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.487570 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-679b4759bb-mrrm6" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.492171 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-xxs87" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.492363 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.493608 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4pjp5\" (UniqueName: \"kubernetes.io/projected/e1f0cffe-e44a-432b-bd66-03be980080b2-kube-api-access-4pjp5\") pod \"keystone-operator-controller-manager-59d7dc95cf-mqdq6\" (UID: \"e1f0cffe-e44a-432b-bd66-03be980080b2\") " pod="openstack-operators/keystone-operator-controller-manager-59d7dc95cf-mqdq6" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.493640 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rd4gv\" (UniqueName: \"kubernetes.io/projected/1ee62730-fdac-40bd-b923-d5544be938e1-kube-api-access-rd4gv\") pod \"mariadb-operator-controller-manager-67bf5bb885-fv6pk\" (UID: \"1ee62730-fdac-40bd-b923-d5544be938e1\") " pod="openstack-operators/mariadb-operator-controller-manager-67bf5bb885-fv6pk" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.493669 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gpbdr\" (UniqueName: \"kubernetes.io/projected/e51d646d-01fe-48e5-af48-29db1e16c849-kube-api-access-gpbdr\") pod \"manila-operator-controller-manager-b7cf8cb5f-6clgt\" (UID: \"e51d646d-01fe-48e5-af48-29db1e16c849\") " pod="openstack-operators/manila-operator-controller-manager-b7cf8cb5f-6clgt" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.493695 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nv2sq\" (UniqueName: \"kubernetes.io/projected/d7e4e3d7-4b52-46b2-8097-b00b4de3b87a-kube-api-access-nv2sq\") pod \"ironic-operator-controller-manager-5f45cd594f-mh4jm\" (UID: \"d7e4e3d7-4b52-46b2-8097-b00b4de3b87a\") " pod="openstack-operators/ironic-operator-controller-manager-5f45cd594f-mh4jm" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.493779 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4fdz7\" (UniqueName: 
\"kubernetes.io/projected/0d140b62-4c85-405b-9eff-8dc02ad9e2ed-kube-api-access-4fdz7\") pod \"nova-operator-controller-manager-7fd5b6bbc6-5tcmh\" (UID: \"0d140b62-4c85-405b-9eff-8dc02ad9e2ed\") " pod="openstack-operators/nova-operator-controller-manager-7fd5b6bbc6-5tcmh" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.493801 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9sssm\" (UniqueName: \"kubernetes.io/projected/cbe9be45-3694-4fe5-ae10-c03fbd176bbc-kube-api-access-9sssm\") pod \"neutron-operator-controller-manager-54fbbfcd44-j6zn8\" (UID: \"cbe9be45-3694-4fe5-ae10-c03fbd176bbc\") " pod="openstack-operators/neutron-operator-controller-manager-54fbbfcd44-j6zn8" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.501998 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-84c745747f-gv26d"] Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.502965 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-84c745747f-gv26d" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.510631 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-285xj" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.516658 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-2wbj4" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.526881 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/placement-operator-controller-manager-598c4c8547-lq48f"] Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.528042 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-598c4c8547-lq48f" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.539961 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-6r4gw" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.550172 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nv2sq\" (UniqueName: \"kubernetes.io/projected/d7e4e3d7-4b52-46b2-8097-b00b4de3b87a-kube-api-access-nv2sq\") pod \"ironic-operator-controller-manager-5f45cd594f-mh4jm\" (UID: \"d7e4e3d7-4b52-46b2-8097-b00b4de3b87a\") " pod="openstack-operators/ironic-operator-controller-manager-5f45cd594f-mh4jm" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.550774 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gpbdr\" (UniqueName: \"kubernetes.io/projected/e51d646d-01fe-48e5-af48-29db1e16c849-kube-api-access-gpbdr\") pod \"manila-operator-controller-manager-b7cf8cb5f-6clgt\" (UID: \"e51d646d-01fe-48e5-af48-29db1e16c849\") " pod="openstack-operators/manila-operator-controller-manager-b7cf8cb5f-6clgt" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.553754 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rd4gv\" (UniqueName: \"kubernetes.io/projected/1ee62730-fdac-40bd-b923-d5544be938e1-kube-api-access-rd4gv\") pod \"mariadb-operator-controller-manager-67bf5bb885-fv6pk\" (UID: \"1ee62730-fdac-40bd-b923-d5544be938e1\") " pod="openstack-operators/mariadb-operator-controller-manager-67bf5bb885-fv6pk" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.558273 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-5f45cd594f-mh4jm" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.560655 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4pjp5\" (UniqueName: \"kubernetes.io/projected/e1f0cffe-e44a-432b-bd66-03be980080b2-kube-api-access-4pjp5\") pod \"keystone-operator-controller-manager-59d7dc95cf-mqdq6\" (UID: \"e1f0cffe-e44a-432b-bd66-03be980080b2\") " pod="openstack-operators/keystone-operator-controller-manager-59d7dc95cf-mqdq6" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.585476 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-659bb84579nbl75"] Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.596873 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/14d20f5e-b22a-4c46-8712-f65e973ee387-cert\") pod \"openstack-baremetal-operator-controller-manager-659bb84579nbl75\" (UID: \"14d20f5e-b22a-4c46-8712-f65e973ee387\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-659bb84579nbl75" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.596944 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zn2z7\" (UniqueName: \"kubernetes.io/projected/14d20f5e-b22a-4c46-8712-f65e973ee387-kube-api-access-zn2z7\") pod \"openstack-baremetal-operator-controller-manager-659bb84579nbl75\" (UID: \"14d20f5e-b22a-4c46-8712-f65e973ee387\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-659bb84579nbl75" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.596970 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4fdz7\" (UniqueName: \"kubernetes.io/projected/0d140b62-4c85-405b-9eff-8dc02ad9e2ed-kube-api-access-4fdz7\") pod \"nova-operator-controller-manager-7fd5b6bbc6-5tcmh\" (UID: \"0d140b62-4c85-405b-9eff-8dc02ad9e2ed\") " pod="openstack-operators/nova-operator-controller-manager-7fd5b6bbc6-5tcmh" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.596989 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9sssm\" (UniqueName: \"kubernetes.io/projected/cbe9be45-3694-4fe5-ae10-c03fbd176bbc-kube-api-access-9sssm\") pod \"neutron-operator-controller-manager-54fbbfcd44-j6zn8\" (UID: \"cbe9be45-3694-4fe5-ae10-c03fbd176bbc\") " pod="openstack-operators/neutron-operator-controller-manager-54fbbfcd44-j6zn8" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.597012 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xr2fp\" (UniqueName: \"kubernetes.io/projected/64c75872-eff8-45af-bf14-88b5896489ee-kube-api-access-xr2fp\") pod \"octavia-operator-controller-manager-75f8d67d86-42l4j\" (UID: \"64c75872-eff8-45af-bf14-88b5896489ee\") " pod="openstack-operators/octavia-operator-controller-manager-75f8d67d86-42l4j" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.611115 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-84c745747f-gv26d"] Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.627859 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-598c4c8547-lq48f"] Oct 01 15:17:26 crc 
kubenswrapper[4869]: I1001 15:17:26.632603 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-859cd486d-s8dwj" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.637549 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4fdz7\" (UniqueName: \"kubernetes.io/projected/0d140b62-4c85-405b-9eff-8dc02ad9e2ed-kube-api-access-4fdz7\") pod \"nova-operator-controller-manager-7fd5b6bbc6-5tcmh\" (UID: \"0d140b62-4c85-405b-9eff-8dc02ad9e2ed\") " pod="openstack-operators/nova-operator-controller-manager-7fd5b6bbc6-5tcmh" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.657759 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-77fb7bcf5b-7pl4z" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.676714 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-cb66d6b59-r69lh"] Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.677657 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-cb66d6b59-r69lh" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.680088 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9sssm\" (UniqueName: \"kubernetes.io/projected/cbe9be45-3694-4fe5-ae10-c03fbd176bbc-kube-api-access-9sssm\") pod \"neutron-operator-controller-manager-54fbbfcd44-j6zn8\" (UID: \"cbe9be45-3694-4fe5-ae10-c03fbd176bbc\") " pod="openstack-operators/neutron-operator-controller-manager-54fbbfcd44-j6zn8" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.680147 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-hlsfz" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.703095 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jncq4\" (UniqueName: \"kubernetes.io/projected/c74d5ad2-5385-45ee-af5a-db7c45af2bef-kube-api-access-jncq4\") pod \"placement-operator-controller-manager-598c4c8547-lq48f\" (UID: \"c74d5ad2-5385-45ee-af5a-db7c45af2bef\") " pod="openstack-operators/placement-operator-controller-manager-598c4c8547-lq48f" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.703142 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/14d20f5e-b22a-4c46-8712-f65e973ee387-cert\") pod \"openstack-baremetal-operator-controller-manager-659bb84579nbl75\" (UID: \"14d20f5e-b22a-4c46-8712-f65e973ee387\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-659bb84579nbl75" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.703196 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9k5wp\" (UniqueName: \"kubernetes.io/projected/bcc95d48-0c42-425f-97de-90db5f8d02c8-kube-api-access-9k5wp\") pod \"ovn-operator-controller-manager-84c745747f-gv26d\" (UID: \"bcc95d48-0c42-425f-97de-90db5f8d02c8\") " pod="openstack-operators/ovn-operator-controller-manager-84c745747f-gv26d" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.703229 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zn2z7\" (UniqueName: 
\"kubernetes.io/projected/14d20f5e-b22a-4c46-8712-f65e973ee387-kube-api-access-zn2z7\") pod \"openstack-baremetal-operator-controller-manager-659bb84579nbl75\" (UID: \"14d20f5e-b22a-4c46-8712-f65e973ee387\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-659bb84579nbl75" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.703273 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xr2fp\" (UniqueName: \"kubernetes.io/projected/64c75872-eff8-45af-bf14-88b5896489ee-kube-api-access-xr2fp\") pod \"octavia-operator-controller-manager-75f8d67d86-42l4j\" (UID: \"64c75872-eff8-45af-bf14-88b5896489ee\") " pod="openstack-operators/octavia-operator-controller-manager-75f8d67d86-42l4j" Oct 01 15:17:26 crc kubenswrapper[4869]: E1001 15:17:26.708915 4869 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Oct 01 15:17:26 crc kubenswrapper[4869]: E1001 15:17:26.708963 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/14d20f5e-b22a-4c46-8712-f65e973ee387-cert podName:14d20f5e-b22a-4c46-8712-f65e973ee387 nodeName:}" failed. No retries permitted until 2025-10-01 15:17:27.208947939 +0000 UTC m=+756.355791055 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/14d20f5e-b22a-4c46-8712-f65e973ee387-cert") pod "openstack-baremetal-operator-controller-manager-659bb84579nbl75" (UID: "14d20f5e-b22a-4c46-8712-f65e973ee387") : secret "openstack-baremetal-operator-webhook-server-cert" not found Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.714108 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-689b4f76c9-7fhd4"] Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.718336 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5b4fc86755-g27k4" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.729018 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-cb66d6b59-r69lh"] Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.729116 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-689b4f76c9-7fhd4" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.732393 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-r7pwl" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.732509 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-b7cf8cb5f-6clgt" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.744286 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zn2z7\" (UniqueName: \"kubernetes.io/projected/14d20f5e-b22a-4c46-8712-f65e973ee387-kube-api-access-zn2z7\") pod \"openstack-baremetal-operator-controller-manager-659bb84579nbl75\" (UID: \"14d20f5e-b22a-4c46-8712-f65e973ee387\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-659bb84579nbl75" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.745864 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xr2fp\" (UniqueName: \"kubernetes.io/projected/64c75872-eff8-45af-bf14-88b5896489ee-kube-api-access-xr2fp\") pod \"octavia-operator-controller-manager-75f8d67d86-42l4j\" (UID: \"64c75872-eff8-45af-bf14-88b5896489ee\") " pod="openstack-operators/octavia-operator-controller-manager-75f8d67d86-42l4j" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.754957 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-cbdf6dc66-n8xn2"] Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.756476 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-cbdf6dc66-n8xn2" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.759702 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-cnjqc" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.760800 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-689b4f76c9-7fhd4"] Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.769778 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-67bf5bb885-fv6pk" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.778672 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-cbdf6dc66-n8xn2"] Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.791598 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-68d7bc5569-9wzmb"] Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.792898 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-68d7bc5569-9wzmb" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.802704 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-54fbbfcd44-j6zn8" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.802974 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-mcmhd" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.809364 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jncq4\" (UniqueName: \"kubernetes.io/projected/c74d5ad2-5385-45ee-af5a-db7c45af2bef-kube-api-access-jncq4\") pod \"placement-operator-controller-manager-598c4c8547-lq48f\" (UID: \"c74d5ad2-5385-45ee-af5a-db7c45af2bef\") " pod="openstack-operators/placement-operator-controller-manager-598c4c8547-lq48f" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.809406 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vxklr\" (UniqueName: \"kubernetes.io/projected/4d9e486b-4e16-422f-b594-f6e6bf76c569-kube-api-access-vxklr\") pod \"telemetry-operator-controller-manager-cb66d6b59-r69lh\" (UID: \"4d9e486b-4e16-422f-b594-f6e6bf76c569\") " pod="openstack-operators/telemetry-operator-controller-manager-cb66d6b59-r69lh" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.809453 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9k5wp\" (UniqueName: \"kubernetes.io/projected/bcc95d48-0c42-425f-97de-90db5f8d02c8-kube-api-access-9k5wp\") pod \"ovn-operator-controller-manager-84c745747f-gv26d\" (UID: \"bcc95d48-0c42-425f-97de-90db5f8d02c8\") " pod="openstack-operators/ovn-operator-controller-manager-84c745747f-gv26d" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.820507 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-68d7bc5569-9wzmb"] Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.845718 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-59d7dc95cf-mqdq6" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.850092 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jncq4\" (UniqueName: \"kubernetes.io/projected/c74d5ad2-5385-45ee-af5a-db7c45af2bef-kube-api-access-jncq4\") pod \"placement-operator-controller-manager-598c4c8547-lq48f\" (UID: \"c74d5ad2-5385-45ee-af5a-db7c45af2bef\") " pod="openstack-operators/placement-operator-controller-manager-598c4c8547-lq48f" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.851603 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9k5wp\" (UniqueName: \"kubernetes.io/projected/bcc95d48-0c42-425f-97de-90db5f8d02c8-kube-api-access-9k5wp\") pod \"ovn-operator-controller-manager-84c745747f-gv26d\" (UID: \"bcc95d48-0c42-425f-97de-90db5f8d02c8\") " pod="openstack-operators/ovn-operator-controller-manager-84c745747f-gv26d" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.878644 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-6c7b6bcb7c-hnlxr"] Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.879696 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-6c7b6bcb7c-hnlxr" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.880555 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-7fd5b6bbc6-5tcmh" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.882048 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.882110 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-fbzk9" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.914402 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pmtfg\" (UniqueName: \"kubernetes.io/projected/a74ba78b-be87-40e4-a1a1-f59a10612f6c-kube-api-access-pmtfg\") pod \"test-operator-controller-manager-cbdf6dc66-n8xn2\" (UID: \"a74ba78b-be87-40e4-a1a1-f59a10612f6c\") " pod="openstack-operators/test-operator-controller-manager-cbdf6dc66-n8xn2" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.914451 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ccda625c-cf12-415b-9a87-dd77a4c0fa1b-cert\") pod \"infra-operator-controller-manager-5c8fdc4d5c-pd6wq\" (UID: \"ccda625c-cf12-415b-9a87-dd77a4c0fa1b\") " pod="openstack-operators/infra-operator-controller-manager-5c8fdc4d5c-pd6wq" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.914491 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vxklr\" (UniqueName: \"kubernetes.io/projected/4d9e486b-4e16-422f-b594-f6e6bf76c569-kube-api-access-vxklr\") pod \"telemetry-operator-controller-manager-cb66d6b59-r69lh\" (UID: \"4d9e486b-4e16-422f-b594-f6e6bf76c569\") " pod="openstack-operators/telemetry-operator-controller-manager-cb66d6b59-r69lh" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.914532 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2cwwn\" (UniqueName: \"kubernetes.io/projected/a49df948-6460-4d87-82d4-f65bf570cb7b-kube-api-access-2cwwn\") pod \"swift-operator-controller-manager-689b4f76c9-7fhd4\" (UID: \"a49df948-6460-4d87-82d4-f65bf570cb7b\") " pod="openstack-operators/swift-operator-controller-manager-689b4f76c9-7fhd4" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.914552 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6vrvv\" (UniqueName: \"kubernetes.io/projected/22cc979e-ec88-4ed2-bed9-fe4e685cae46-kube-api-access-6vrvv\") pod \"watcher-operator-controller-manager-68d7bc5569-9wzmb\" (UID: \"22cc979e-ec88-4ed2-bed9-fe4e685cae46\") " pod="openstack-operators/watcher-operator-controller-manager-68d7bc5569-9wzmb" Oct 01 15:17:26 crc kubenswrapper[4869]: E1001 15:17:26.914646 4869 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Oct 01 15:17:26 crc kubenswrapper[4869]: E1001 15:17:26.914699 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ccda625c-cf12-415b-9a87-dd77a4c0fa1b-cert podName:ccda625c-cf12-415b-9a87-dd77a4c0fa1b nodeName:}" failed. 
No retries permitted until 2025-10-01 15:17:27.914684595 +0000 UTC m=+757.061527711 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/ccda625c-cf12-415b-9a87-dd77a4c0fa1b-cert") pod "infra-operator-controller-manager-5c8fdc4d5c-pd6wq" (UID: "ccda625c-cf12-415b-9a87-dd77a4c0fa1b") : secret "infra-operator-webhook-server-cert" not found Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.936474 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-75f8d67d86-42l4j" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.949329 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-6c7b6bcb7c-hnlxr"] Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.954672 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vxklr\" (UniqueName: \"kubernetes.io/projected/4d9e486b-4e16-422f-b594-f6e6bf76c569-kube-api-access-vxklr\") pod \"telemetry-operator-controller-manager-cb66d6b59-r69lh\" (UID: \"4d9e486b-4e16-422f-b594-f6e6bf76c569\") " pod="openstack-operators/telemetry-operator-controller-manager-cb66d6b59-r69lh" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.988744 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-84c745747f-gv26d" Oct 01 15:17:26 crc kubenswrapper[4869]: I1001 15:17:26.996515 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-6mcvk"] Oct 01 15:17:27 crc kubenswrapper[4869]: I1001 15:17:27.004914 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-6mcvk" Oct 01 15:17:27 crc kubenswrapper[4869]: I1001 15:17:27.006636 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-598c4c8547-lq48f" Oct 01 15:17:27 crc kubenswrapper[4869]: I1001 15:17:27.007483 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-k4t26" Oct 01 15:17:27 crc kubenswrapper[4869]: I1001 15:17:27.016456 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pmtfg\" (UniqueName: \"kubernetes.io/projected/a74ba78b-be87-40e4-a1a1-f59a10612f6c-kube-api-access-pmtfg\") pod \"test-operator-controller-manager-cbdf6dc66-n8xn2\" (UID: \"a74ba78b-be87-40e4-a1a1-f59a10612f6c\") " pod="openstack-operators/test-operator-controller-manager-cbdf6dc66-n8xn2" Oct 01 15:17:27 crc kubenswrapper[4869]: I1001 15:17:27.016509 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xm6gb\" (UniqueName: \"kubernetes.io/projected/3f500075-2bfe-440a-856c-976d2404158f-kube-api-access-xm6gb\") pod \"openstack-operator-controller-manager-6c7b6bcb7c-hnlxr\" (UID: \"3f500075-2bfe-440a-856c-976d2404158f\") " pod="openstack-operators/openstack-operator-controller-manager-6c7b6bcb7c-hnlxr" Oct 01 15:17:27 crc kubenswrapper[4869]: I1001 15:17:27.016601 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/3f500075-2bfe-440a-856c-976d2404158f-cert\") pod \"openstack-operator-controller-manager-6c7b6bcb7c-hnlxr\" (UID: \"3f500075-2bfe-440a-856c-976d2404158f\") " pod="openstack-operators/openstack-operator-controller-manager-6c7b6bcb7c-hnlxr" Oct 01 15:17:27 crc kubenswrapper[4869]: I1001 15:17:27.016628 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2cwwn\" (UniqueName: \"kubernetes.io/projected/a49df948-6460-4d87-82d4-f65bf570cb7b-kube-api-access-2cwwn\") pod \"swift-operator-controller-manager-689b4f76c9-7fhd4\" (UID: \"a49df948-6460-4d87-82d4-f65bf570cb7b\") " pod="openstack-operators/swift-operator-controller-manager-689b4f76c9-7fhd4" Oct 01 15:17:27 crc kubenswrapper[4869]: I1001 15:17:27.016651 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6vrvv\" (UniqueName: \"kubernetes.io/projected/22cc979e-ec88-4ed2-bed9-fe4e685cae46-kube-api-access-6vrvv\") pod \"watcher-operator-controller-manager-68d7bc5569-9wzmb\" (UID: \"22cc979e-ec88-4ed2-bed9-fe4e685cae46\") " pod="openstack-operators/watcher-operator-controller-manager-68d7bc5569-9wzmb" Oct 01 15:17:27 crc kubenswrapper[4869]: I1001 15:17:27.023805 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-6mcvk"] Oct 01 15:17:27 crc kubenswrapper[4869]: I1001 15:17:27.033489 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-cb66d6b59-r69lh" Oct 01 15:17:27 crc kubenswrapper[4869]: I1001 15:17:27.046508 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pmtfg\" (UniqueName: \"kubernetes.io/projected/a74ba78b-be87-40e4-a1a1-f59a10612f6c-kube-api-access-pmtfg\") pod \"test-operator-controller-manager-cbdf6dc66-n8xn2\" (UID: \"a74ba78b-be87-40e4-a1a1-f59a10612f6c\") " pod="openstack-operators/test-operator-controller-manager-cbdf6dc66-n8xn2" Oct 01 15:17:27 crc kubenswrapper[4869]: I1001 15:17:27.048240 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2cwwn\" (UniqueName: \"kubernetes.io/projected/a49df948-6460-4d87-82d4-f65bf570cb7b-kube-api-access-2cwwn\") pod \"swift-operator-controller-manager-689b4f76c9-7fhd4\" (UID: \"a49df948-6460-4d87-82d4-f65bf570cb7b\") " pod="openstack-operators/swift-operator-controller-manager-689b4f76c9-7fhd4" Oct 01 15:17:27 crc kubenswrapper[4869]: I1001 15:17:27.052548 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6vrvv\" (UniqueName: \"kubernetes.io/projected/22cc979e-ec88-4ed2-bed9-fe4e685cae46-kube-api-access-6vrvv\") pod \"watcher-operator-controller-manager-68d7bc5569-9wzmb\" (UID: \"22cc979e-ec88-4ed2-bed9-fe4e685cae46\") " pod="openstack-operators/watcher-operator-controller-manager-68d7bc5569-9wzmb" Oct 01 15:17:27 crc kubenswrapper[4869]: I1001 15:17:27.086855 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-689b4f76c9-7fhd4" Oct 01 15:17:27 crc kubenswrapper[4869]: I1001 15:17:27.087247 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-f7f98cb69-79zfb"] Oct 01 15:17:27 crc kubenswrapper[4869]: I1001 15:17:27.117191 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/test-operator-controller-manager-cbdf6dc66-n8xn2" Oct 01 15:17:27 crc kubenswrapper[4869]: I1001 15:17:27.117802 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7lngm\" (UniqueName: \"kubernetes.io/projected/34b12def-835e-430c-9e9a-29f191900a00-kube-api-access-7lngm\") pod \"rabbitmq-cluster-operator-manager-5f97d8c699-6mcvk\" (UID: \"34b12def-835e-430c-9e9a-29f191900a00\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-6mcvk" Oct 01 15:17:27 crc kubenswrapper[4869]: I1001 15:17:27.117875 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xm6gb\" (UniqueName: \"kubernetes.io/projected/3f500075-2bfe-440a-856c-976d2404158f-kube-api-access-xm6gb\") pod \"openstack-operator-controller-manager-6c7b6bcb7c-hnlxr\" (UID: \"3f500075-2bfe-440a-856c-976d2404158f\") " pod="openstack-operators/openstack-operator-controller-manager-6c7b6bcb7c-hnlxr" Oct 01 15:17:27 crc kubenswrapper[4869]: I1001 15:17:27.117948 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/3f500075-2bfe-440a-856c-976d2404158f-cert\") pod \"openstack-operator-controller-manager-6c7b6bcb7c-hnlxr\" (UID: \"3f500075-2bfe-440a-856c-976d2404158f\") " pod="openstack-operators/openstack-operator-controller-manager-6c7b6bcb7c-hnlxr" Oct 01 15:17:27 crc kubenswrapper[4869]: E1001 15:17:27.118080 4869 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Oct 01 15:17:27 crc kubenswrapper[4869]: E1001 15:17:27.118133 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3f500075-2bfe-440a-856c-976d2404158f-cert podName:3f500075-2bfe-440a-856c-976d2404158f nodeName:}" failed. No retries permitted until 2025-10-01 15:17:27.618118243 +0000 UTC m=+756.764961349 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/3f500075-2bfe-440a-856c-976d2404158f-cert") pod "openstack-operator-controller-manager-6c7b6bcb7c-hnlxr" (UID: "3f500075-2bfe-440a-856c-976d2404158f") : secret "webhook-server-cert" not found Oct 01 15:17:27 crc kubenswrapper[4869]: I1001 15:17:27.139032 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xm6gb\" (UniqueName: \"kubernetes.io/projected/3f500075-2bfe-440a-856c-976d2404158f-kube-api-access-xm6gb\") pod \"openstack-operator-controller-manager-6c7b6bcb7c-hnlxr\" (UID: \"3f500075-2bfe-440a-856c-976d2404158f\") " pod="openstack-operators/openstack-operator-controller-manager-6c7b6bcb7c-hnlxr" Oct 01 15:17:27 crc kubenswrapper[4869]: I1001 15:17:27.144128 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-68d7bc5569-9wzmb" Oct 01 15:17:27 crc kubenswrapper[4869]: I1001 15:17:27.212509 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-8bc4775b5-wmx8b"] Oct 01 15:17:27 crc kubenswrapper[4869]: I1001 15:17:27.219176 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/14d20f5e-b22a-4c46-8712-f65e973ee387-cert\") pod \"openstack-baremetal-operator-controller-manager-659bb84579nbl75\" (UID: \"14d20f5e-b22a-4c46-8712-f65e973ee387\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-659bb84579nbl75" Oct 01 15:17:27 crc kubenswrapper[4869]: I1001 15:17:27.219274 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7lngm\" (UniqueName: \"kubernetes.io/projected/34b12def-835e-430c-9e9a-29f191900a00-kube-api-access-7lngm\") pod \"rabbitmq-cluster-operator-manager-5f97d8c699-6mcvk\" (UID: \"34b12def-835e-430c-9e9a-29f191900a00\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-6mcvk" Oct 01 15:17:27 crc kubenswrapper[4869]: I1001 15:17:27.228320 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/14d20f5e-b22a-4c46-8712-f65e973ee387-cert\") pod \"openstack-baremetal-operator-controller-manager-659bb84579nbl75\" (UID: \"14d20f5e-b22a-4c46-8712-f65e973ee387\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-659bb84579nbl75" Oct 01 15:17:27 crc kubenswrapper[4869]: W1001 15:17:27.234558 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podee8b5119_5b8d_494f_8864_8f0cf2a10631.slice/crio-8c5755c9d309ab59d149bf847acddd609e2834dc4868835b39848beeeed38579 WatchSource:0}: Error finding container 8c5755c9d309ab59d149bf847acddd609e2834dc4868835b39848beeeed38579: Status 404 returned error can't find the container with id 8c5755c9d309ab59d149bf847acddd609e2834dc4868835b39848beeeed38579 Oct 01 15:17:27 crc kubenswrapper[4869]: I1001 15:17:27.240746 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7lngm\" (UniqueName: \"kubernetes.io/projected/34b12def-835e-430c-9e9a-29f191900a00-kube-api-access-7lngm\") pod \"rabbitmq-cluster-operator-manager-5f97d8c699-6mcvk\" (UID: \"34b12def-835e-430c-9e9a-29f191900a00\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-6mcvk" Oct 01 15:17:27 crc kubenswrapper[4869]: I1001 15:17:27.268408 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-659bb84579nbl75" Oct 01 15:17:27 crc kubenswrapper[4869]: I1001 15:17:27.318392 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-679b4759bb-mrrm6"] Oct 01 15:17:27 crc kubenswrapper[4869]: I1001 15:17:27.352487 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-6mcvk" Oct 01 15:17:27 crc kubenswrapper[4869]: W1001 15:17:27.356696 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1f43a837_ea14_4bdb_9b91_ffbd20f1bad3.slice/crio-9be8d631f2e87a5524330e2f02fffbdfb90b94c21e70b6002f84142ada530bed WatchSource:0}: Error finding container 9be8d631f2e87a5524330e2f02fffbdfb90b94c21e70b6002f84142ada530bed: Status 404 returned error can't find the container with id 9be8d631f2e87a5524330e2f02fffbdfb90b94c21e70b6002f84142ada530bed Oct 01 15:17:27 crc kubenswrapper[4869]: I1001 15:17:27.454225 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-8bc4775b5-wmx8b" event={"ID":"ee8b5119-5b8d-494f-8864-8f0cf2a10631","Type":"ContainerStarted","Data":"8c5755c9d309ab59d149bf847acddd609e2834dc4868835b39848beeeed38579"} Oct 01 15:17:27 crc kubenswrapper[4869]: I1001 15:17:27.455710 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-f7f98cb69-79zfb" event={"ID":"62fc6158-1408-44bd-891b-ef7ead1f5867","Type":"ContainerStarted","Data":"f9ddf6cc8f6d6ecf7c46b05a5c4c774291c78eb948a3d27c5cd577132711da3a"} Oct 01 15:17:27 crc kubenswrapper[4869]: I1001 15:17:27.457030 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-679b4759bb-mrrm6" event={"ID":"1f43a837-ea14-4bdb-9b91-ffbd20f1bad3","Type":"ContainerStarted","Data":"9be8d631f2e87a5524330e2f02fffbdfb90b94c21e70b6002f84142ada530bed"} Oct 01 15:17:27 crc kubenswrapper[4869]: I1001 15:17:27.471536 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-5f45cd594f-mh4jm"] Oct 01 15:17:27 crc kubenswrapper[4869]: I1001 15:17:27.501088 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-77fb7bcf5b-7pl4z"] Oct 01 15:17:27 crc kubenswrapper[4869]: I1001 15:17:27.626887 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/3f500075-2bfe-440a-856c-976d2404158f-cert\") pod \"openstack-operator-controller-manager-6c7b6bcb7c-hnlxr\" (UID: \"3f500075-2bfe-440a-856c-976d2404158f\") " pod="openstack-operators/openstack-operator-controller-manager-6c7b6bcb7c-hnlxr" Oct 01 15:17:27 crc kubenswrapper[4869]: E1001 15:17:27.627133 4869 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Oct 01 15:17:27 crc kubenswrapper[4869]: E1001 15:17:27.627197 4869 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3f500075-2bfe-440a-856c-976d2404158f-cert podName:3f500075-2bfe-440a-856c-976d2404158f nodeName:}" failed. No retries permitted until 2025-10-01 15:17:28.627182301 +0000 UTC m=+757.774025417 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/3f500075-2bfe-440a-856c-976d2404158f-cert") pod "openstack-operator-controller-manager-6c7b6bcb7c-hnlxr" (UID: "3f500075-2bfe-440a-856c-976d2404158f") : secret "webhook-server-cert" not found Oct 01 15:17:27 crc kubenswrapper[4869]: I1001 15:17:27.690130 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-59d7dc95cf-mqdq6"] Oct 01 15:17:27 crc kubenswrapper[4869]: I1001 15:17:27.714796 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-859cd486d-s8dwj"] Oct 01 15:17:27 crc kubenswrapper[4869]: I1001 15:17:27.733386 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-b7cf8cb5f-6clgt"] Oct 01 15:17:27 crc kubenswrapper[4869]: I1001 15:17:27.738551 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5b4fc86755-g27k4"] Oct 01 15:17:27 crc kubenswrapper[4869]: W1001 15:17:27.741095 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda67caefc_004c_4cd3_92b1_191f9531044a.slice/crio-cf328fca8f46cfca8ade1801a6d17881cff0349f376da5d60b3a68ed8d688ed8 WatchSource:0}: Error finding container cf328fca8f46cfca8ade1801a6d17881cff0349f376da5d60b3a68ed8d688ed8: Status 404 returned error can't find the container with id cf328fca8f46cfca8ade1801a6d17881cff0349f376da5d60b3a68ed8d688ed8 Oct 01 15:17:27 crc kubenswrapper[4869]: W1001 15:17:27.741716 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1ee62730_fdac_40bd_b923_d5544be938e1.slice/crio-95ecb4ae14d4fd1fa1b301a8258ebee64aa19e3d85fad66ea37368af898fec23 WatchSource:0}: Error finding container 95ecb4ae14d4fd1fa1b301a8258ebee64aa19e3d85fad66ea37368af898fec23: Status 404 returned error can't find the container with id 95ecb4ae14d4fd1fa1b301a8258ebee64aa19e3d85fad66ea37368af898fec23 Oct 01 15:17:27 crc kubenswrapper[4869]: I1001 15:17:27.743404 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-67bf5bb885-fv6pk"] Oct 01 15:17:27 crc kubenswrapper[4869]: I1001 15:17:27.930589 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ccda625c-cf12-415b-9a87-dd77a4c0fa1b-cert\") pod \"infra-operator-controller-manager-5c8fdc4d5c-pd6wq\" (UID: \"ccda625c-cf12-415b-9a87-dd77a4c0fa1b\") " pod="openstack-operators/infra-operator-controller-manager-5c8fdc4d5c-pd6wq" Oct 01 15:17:27 crc kubenswrapper[4869]: I1001 15:17:27.935232 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ccda625c-cf12-415b-9a87-dd77a4c0fa1b-cert\") pod \"infra-operator-controller-manager-5c8fdc4d5c-pd6wq\" (UID: \"ccda625c-cf12-415b-9a87-dd77a4c0fa1b\") " pod="openstack-operators/infra-operator-controller-manager-5c8fdc4d5c-pd6wq" Oct 01 15:17:28 crc kubenswrapper[4869]: I1001 15:17:28.027527 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-5c8fdc4d5c-pd6wq" Oct 01 15:17:28 crc kubenswrapper[4869]: I1001 15:17:28.100562 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-659bb84579nbl75"] Oct 01 15:17:28 crc kubenswrapper[4869]: I1001 15:17:28.114669 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-54fbbfcd44-j6zn8"] Oct 01 15:17:28 crc kubenswrapper[4869]: I1001 15:17:28.122393 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-598c4c8547-lq48f"] Oct 01 15:17:28 crc kubenswrapper[4869]: I1001 15:17:28.132576 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-7fd5b6bbc6-5tcmh"] Oct 01 15:17:28 crc kubenswrapper[4869]: I1001 15:17:28.132621 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-cbdf6dc66-n8xn2"] Oct 01 15:17:28 crc kubenswrapper[4869]: I1001 15:17:28.138710 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-cb66d6b59-r69lh"] Oct 01 15:17:28 crc kubenswrapper[4869]: W1001 15:17:28.152450 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc74d5ad2_5385_45ee_af5a_db7c45af2bef.slice/crio-cad3d1418c08dbc09a3ce4b7a8a1a0f59dffda3475a174e5fe9cca8df98d2753 WatchSource:0}: Error finding container cad3d1418c08dbc09a3ce4b7a8a1a0f59dffda3475a174e5fe9cca8df98d2753: Status 404 returned error can't find the container with id cad3d1418c08dbc09a3ce4b7a8a1a0f59dffda3475a174e5fe9cca8df98d2753 Oct 01 15:17:28 crc kubenswrapper[4869]: I1001 15:17:28.160283 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-689b4f76c9-7fhd4"] Oct 01 15:17:28 crc kubenswrapper[4869]: W1001 15:17:28.200180 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda74ba78b_be87_40e4_a1a1_f59a10612f6c.slice/crio-a3f6f0ad2a1c0177738beeccd7dab29ff55a89e62e80a70d3efabc3b7f60a976 WatchSource:0}: Error finding container a3f6f0ad2a1c0177738beeccd7dab29ff55a89e62e80a70d3efabc3b7f60a976: Status 404 returned error can't find the container with id a3f6f0ad2a1c0177738beeccd7dab29ff55a89e62e80a70d3efabc3b7f60a976 Oct 01 15:17:28 crc kubenswrapper[4869]: W1001 15:17:28.201829 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod22cc979e_ec88_4ed2_bed9_fe4e685cae46.slice/crio-7cc3d91770c6eea7242476ece5491f7363016c3ed23e80a807a28cb2ef61c60d WatchSource:0}: Error finding container 7cc3d91770c6eea7242476ece5491f7363016c3ed23e80a807a28cb2ef61c60d: Status 404 returned error can't find the container with id 7cc3d91770c6eea7242476ece5491f7363016c3ed23e80a807a28cb2ef61c60d Oct 01 15:17:28 crc kubenswrapper[4869]: W1001 15:17:28.202770 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4d9e486b_4e16_422f_b594_f6e6bf76c569.slice/crio-6d387d17e4c07878a2591bb41cd647f281dc050a83d1538cc5086339f73022e7 WatchSource:0}: Error finding container 6d387d17e4c07878a2591bb41cd647f281dc050a83d1538cc5086339f73022e7: Status 404 returned error 
can't find the container with id 6d387d17e4c07878a2591bb41cd647f281dc050a83d1538cc5086339f73022e7 Oct 01 15:17:28 crc kubenswrapper[4869]: E1001 15:17:28.203117 4869 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/test-operator@sha256:f61fdfbfd12027ce6b4e7ad553ec0582f080de0cfb472de6dc04ad3078bb17e3,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-pmtfg,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-cbdf6dc66-n8xn2_openstack-operators(a74ba78b-be87-40e4-a1a1-f59a10612f6c): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Oct 01 15:17:28 crc kubenswrapper[4869]: E1001 15:17:28.205195 4869 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/watcher-operator@sha256:09c2f519ea218f6038b7be039b8e6ac33ee93b217b9be0d2d18a5e7f94faae06,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-6vrvv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-68d7bc5569-9wzmb_openstack-operators(22cc979e-ec88-4ed2-bed9-fe4e685cae46): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Oct 01 15:17:28 crc kubenswrapper[4869]: I1001 15:17:28.209343 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-75f8d67d86-42l4j"] Oct 01 15:17:28 crc kubenswrapper[4869]: I1001 15:17:28.212594 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-68d7bc5569-9wzmb"] Oct 01 15:17:28 crc kubenswrapper[4869]: E1001 15:17:28.214203 4869 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/telemetry-operator@sha256:8fdf377daf05e2fa7346505017078fa81981dd945bf635a64c8022633c68118f,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-vxklr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-cb66d6b59-r69lh_openstack-operators(4d9e486b-4e16-422f-b594-f6e6bf76c569): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Oct 01 15:17:28 crc kubenswrapper[4869]: E1001 15:17:28.214393 4869 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/nova-operator@sha256:a517abc6427ab73fed93b0bd89a6eb52d0311fbfb0c00752f889baf8ffd5068f,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-4fdz7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod nova-operator-controller-manager-7fd5b6bbc6-5tcmh_openstack-operators(0d140b62-4c85-405b-9eff-8dc02ad9e2ed): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Oct 01 15:17:28 crc kubenswrapper[4869]: E1001 15:17:28.216232 4869 
kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-7lngm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-5f97d8c699-6mcvk_openstack-operators(34b12def-835e-430c-9e9a-29f191900a00): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Oct 01 15:17:28 crc kubenswrapper[4869]: E1001 15:17:28.217679 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-6mcvk" podUID="34b12def-835e-430c-9e9a-29f191900a00" Oct 01 15:17:28 crc kubenswrapper[4869]: I1001 15:17:28.218331 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-84c745747f-gv26d"] Oct 01 15:17:28 crc kubenswrapper[4869]: I1001 15:17:28.221761 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-6mcvk"] Oct 01 15:17:28 crc kubenswrapper[4869]: E1001 15:17:28.229854 4869 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/ovn-operator@sha256:1051afc168038fb814f75e7a5f07c588b295a83ebd143dcd8b46d799e31ad302,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} 
BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-9k5wp,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-operator-controller-manager-84c745747f-gv26d_openstack-operators(bcc95d48-0c42-425f-97de-90db5f8d02c8): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Oct 01 15:17:28 crc kubenswrapper[4869]: I1001 15:17:28.467204 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-659bb84579nbl75" event={"ID":"14d20f5e-b22a-4c46-8712-f65e973ee387","Type":"ContainerStarted","Data":"c1b6ae0081d3d61d8b8a325e8139e474c881b715b568feea783d4d647dc303b7"} Oct 01 15:17:28 crc kubenswrapper[4869]: I1001 15:17:28.476278 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-689b4f76c9-7fhd4" event={"ID":"a49df948-6460-4d87-82d4-f65bf570cb7b","Type":"ContainerStarted","Data":"4174184dcc4bacc3048ab4e3d6ccd31094afebbb7fd006ac46131249dd6d6b51"} Oct 01 15:17:28 crc kubenswrapper[4869]: I1001 15:17:28.478008 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-7fd5b6bbc6-5tcmh" event={"ID":"0d140b62-4c85-405b-9eff-8dc02ad9e2ed","Type":"ContainerStarted","Data":"319d9c539a97e48f70a93d4cd09f6b56cfa4bce771d054b192eecd2e3c7fea1b"} Oct 01 15:17:28 crc kubenswrapper[4869]: I1001 15:17:28.479889 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-6mcvk" event={"ID":"34b12def-835e-430c-9e9a-29f191900a00","Type":"ContainerStarted","Data":"186ac27318e12e0ae90897486038817e06426cd4e070f6cccf81f705da72b3ce"} Oct 01 15:17:28 crc kubenswrapper[4869]: E1001 15:17:28.481568 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" 
pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-6mcvk" podUID="34b12def-835e-430c-9e9a-29f191900a00" Oct 01 15:17:28 crc kubenswrapper[4869]: I1001 15:17:28.482385 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-84c745747f-gv26d" event={"ID":"bcc95d48-0c42-425f-97de-90db5f8d02c8","Type":"ContainerStarted","Data":"c4a90a2f6211aaf83615daae3e14ae513512e6c5f8c48a66f16447e955116e67"} Oct 01 15:17:28 crc kubenswrapper[4869]: I1001 15:17:28.484850 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-b7cf8cb5f-6clgt" event={"ID":"e51d646d-01fe-48e5-af48-29db1e16c849","Type":"ContainerStarted","Data":"210f9c946f02f3e12c08575f39f15f616356cc6989da1aeb8c65b913273097fe"} Oct 01 15:17:28 crc kubenswrapper[4869]: I1001 15:17:28.489545 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-cbdf6dc66-n8xn2" event={"ID":"a74ba78b-be87-40e4-a1a1-f59a10612f6c","Type":"ContainerStarted","Data":"a3f6f0ad2a1c0177738beeccd7dab29ff55a89e62e80a70d3efabc3b7f60a976"} Oct 01 15:17:28 crc kubenswrapper[4869]: I1001 15:17:28.500903 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-5f45cd594f-mh4jm" event={"ID":"d7e4e3d7-4b52-46b2-8097-b00b4de3b87a","Type":"ContainerStarted","Data":"eca788667ed621dd05fab1b76dd10596a340c97bcb67eeee4bfd99c9e45a44cc"} Oct 01 15:17:28 crc kubenswrapper[4869]: I1001 15:17:28.502821 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-598c4c8547-lq48f" event={"ID":"c74d5ad2-5385-45ee-af5a-db7c45af2bef","Type":"ContainerStarted","Data":"cad3d1418c08dbc09a3ce4b7a8a1a0f59dffda3475a174e5fe9cca8df98d2753"} Oct 01 15:17:28 crc kubenswrapper[4869]: I1001 15:17:28.503817 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-59d7dc95cf-mqdq6" event={"ID":"e1f0cffe-e44a-432b-bd66-03be980080b2","Type":"ContainerStarted","Data":"1d9523e3a4e008c004022aa11a4d30f9da150bb24f9d35fd02231e292ca85a87"} Oct 01 15:17:28 crc kubenswrapper[4869]: I1001 15:17:28.506903 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5b4fc86755-g27k4" event={"ID":"a67caefc-004c-4cd3-92b1-191f9531044a","Type":"ContainerStarted","Data":"cf328fca8f46cfca8ade1801a6d17881cff0349f376da5d60b3a68ed8d688ed8"} Oct 01 15:17:28 crc kubenswrapper[4869]: I1001 15:17:28.509527 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-cb66d6b59-r69lh" event={"ID":"4d9e486b-4e16-422f-b594-f6e6bf76c569","Type":"ContainerStarted","Data":"6d387d17e4c07878a2591bb41cd647f281dc050a83d1538cc5086339f73022e7"} Oct 01 15:17:28 crc kubenswrapper[4869]: I1001 15:17:28.511626 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-75f8d67d86-42l4j" event={"ID":"64c75872-eff8-45af-bf14-88b5896489ee","Type":"ContainerStarted","Data":"eb323868e6f070cda873cea5b26a1821573fd9fa584986e237c49a595e12a535"} Oct 01 15:17:28 crc kubenswrapper[4869]: I1001 15:17:28.513573 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-68d7bc5569-9wzmb" 
event={"ID":"22cc979e-ec88-4ed2-bed9-fe4e685cae46","Type":"ContainerStarted","Data":"7cc3d91770c6eea7242476ece5491f7363016c3ed23e80a807a28cb2ef61c60d"} Oct 01 15:17:28 crc kubenswrapper[4869]: I1001 15:17:28.516030 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-77fb7bcf5b-7pl4z" event={"ID":"c7cb96bc-1269-4c0c-b3f3-1575ee10543e","Type":"ContainerStarted","Data":"cb68018d5e4e4956e22f511c4acabcab3f83f922d6f846006a6a06033193bd96"} Oct 01 15:17:28 crc kubenswrapper[4869]: I1001 15:17:28.519124 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-54fbbfcd44-j6zn8" event={"ID":"cbe9be45-3694-4fe5-ae10-c03fbd176bbc","Type":"ContainerStarted","Data":"05662cf524bf67c69e69e31d2747edebd95bdd75d3879c4f203e2145855d6d75"} Oct 01 15:17:28 crc kubenswrapper[4869]: I1001 15:17:28.520603 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-859cd486d-s8dwj" event={"ID":"3e866523-b046-49e3-88f5-1c657a204a14","Type":"ContainerStarted","Data":"d0fd0c93b99fce738f0005a872986c67b43ee384a5fd774777217ddf3125c647"} Oct 01 15:17:28 crc kubenswrapper[4869]: I1001 15:17:28.522895 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-67bf5bb885-fv6pk" event={"ID":"1ee62730-fdac-40bd-b923-d5544be938e1","Type":"ContainerStarted","Data":"95ecb4ae14d4fd1fa1b301a8258ebee64aa19e3d85fad66ea37368af898fec23"} Oct 01 15:17:28 crc kubenswrapper[4869]: I1001 15:17:28.548546 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-5c8fdc4d5c-pd6wq"] Oct 01 15:17:28 crc kubenswrapper[4869]: W1001 15:17:28.558087 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podccda625c_cf12_415b_9a87_dd77a4c0fa1b.slice/crio-7a8bf1c65f96e0730d6e99e14da45fe6f9842f720c91c54c11c2267bba55e7aa WatchSource:0}: Error finding container 7a8bf1c65f96e0730d6e99e14da45fe6f9842f720c91c54c11c2267bba55e7aa: Status 404 returned error can't find the container with id 7a8bf1c65f96e0730d6e99e14da45fe6f9842f720c91c54c11c2267bba55e7aa Oct 01 15:17:28 crc kubenswrapper[4869]: E1001 15:17:28.578783 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/nova-operator-controller-manager-7fd5b6bbc6-5tcmh" podUID="0d140b62-4c85-405b-9eff-8dc02ad9e2ed" Oct 01 15:17:28 crc kubenswrapper[4869]: E1001 15:17:28.607971 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/watcher-operator-controller-manager-68d7bc5569-9wzmb" podUID="22cc979e-ec88-4ed2-bed9-fe4e685cae46" Oct 01 15:17:28 crc kubenswrapper[4869]: E1001 15:17:28.608127 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/test-operator-controller-manager-cbdf6dc66-n8xn2" podUID="a74ba78b-be87-40e4-a1a1-f59a10612f6c" Oct 01 15:17:28 crc kubenswrapper[4869]: E1001 15:17:28.611119 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" 
pod="openstack-operators/telemetry-operator-controller-manager-cb66d6b59-r69lh" podUID="4d9e486b-4e16-422f-b594-f6e6bf76c569" Oct 01 15:17:28 crc kubenswrapper[4869]: I1001 15:17:28.640488 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/3f500075-2bfe-440a-856c-976d2404158f-cert\") pod \"openstack-operator-controller-manager-6c7b6bcb7c-hnlxr\" (UID: \"3f500075-2bfe-440a-856c-976d2404158f\") " pod="openstack-operators/openstack-operator-controller-manager-6c7b6bcb7c-hnlxr" Oct 01 15:17:28 crc kubenswrapper[4869]: E1001 15:17:28.650984 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/ovn-operator-controller-manager-84c745747f-gv26d" podUID="bcc95d48-0c42-425f-97de-90db5f8d02c8" Oct 01 15:17:28 crc kubenswrapper[4869]: I1001 15:17:28.664707 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/3f500075-2bfe-440a-856c-976d2404158f-cert\") pod \"openstack-operator-controller-manager-6c7b6bcb7c-hnlxr\" (UID: \"3f500075-2bfe-440a-856c-976d2404158f\") " pod="openstack-operators/openstack-operator-controller-manager-6c7b6bcb7c-hnlxr" Oct 01 15:17:28 crc kubenswrapper[4869]: I1001 15:17:28.716884 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-6c7b6bcb7c-hnlxr" Oct 01 15:17:29 crc kubenswrapper[4869]: I1001 15:17:29.204117 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-6c7b6bcb7c-hnlxr"] Oct 01 15:17:29 crc kubenswrapper[4869]: I1001 15:17:29.535522 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-cb66d6b59-r69lh" event={"ID":"4d9e486b-4e16-422f-b594-f6e6bf76c569","Type":"ContainerStarted","Data":"2a5a4b3f13d94196c7e17ecf6bb2d31a92c5c7305d1391256ea83ad75cfa8d9b"} Oct 01 15:17:29 crc kubenswrapper[4869]: I1001 15:17:29.539773 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-68d7bc5569-9wzmb" event={"ID":"22cc979e-ec88-4ed2-bed9-fe4e685cae46","Type":"ContainerStarted","Data":"518e22bb08c7bea2008365e7e12984546925607e6419ff795ecf4c19d6c1efdf"} Oct 01 15:17:29 crc kubenswrapper[4869]: I1001 15:17:29.555673 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-7fd5b6bbc6-5tcmh" event={"ID":"0d140b62-4c85-405b-9eff-8dc02ad9e2ed","Type":"ContainerStarted","Data":"1c53d77b4cad2babd71dfb1abb544bfcc778d8d093a6d0c0b3f94ab409559757"} Oct 01 15:17:29 crc kubenswrapper[4869]: E1001 15:17:29.555669 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:09c2f519ea218f6038b7be039b8e6ac33ee93b217b9be0d2d18a5e7f94faae06\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-68d7bc5569-9wzmb" podUID="22cc979e-ec88-4ed2-bed9-fe4e685cae46" Oct 01 15:17:29 crc kubenswrapper[4869]: E1001 15:17:29.556699 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image 
\\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:8fdf377daf05e2fa7346505017078fa81981dd945bf635a64c8022633c68118f\\\"\"" pod="openstack-operators/telemetry-operator-controller-manager-cb66d6b59-r69lh" podUID="4d9e486b-4e16-422f-b594-f6e6bf76c569" Oct 01 15:17:29 crc kubenswrapper[4869]: I1001 15:17:29.560209 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-6c7b6bcb7c-hnlxr" event={"ID":"3f500075-2bfe-440a-856c-976d2404158f","Type":"ContainerStarted","Data":"df874d09befdc3ea98172c776c9018e0ed3056b104461a848689a2caab522dd9"} Oct 01 15:17:29 crc kubenswrapper[4869]: I1001 15:17:29.560244 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-6c7b6bcb7c-hnlxr" event={"ID":"3f500075-2bfe-440a-856c-976d2404158f","Type":"ContainerStarted","Data":"099cfbdaace00150e9d5d26d93fce08463df7a9304f59381d219756b64cfd48f"} Oct 01 15:17:29 crc kubenswrapper[4869]: I1001 15:17:29.569008 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-84c745747f-gv26d" event={"ID":"bcc95d48-0c42-425f-97de-90db5f8d02c8","Type":"ContainerStarted","Data":"7f8fb421486ba62cf87b9507432b824d5ee28ef7eb2fbc5e28da7a384926d79c"} Oct 01 15:17:29 crc kubenswrapper[4869]: E1001 15:17:29.564518 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/nova-operator@sha256:a517abc6427ab73fed93b0bd89a6eb52d0311fbfb0c00752f889baf8ffd5068f\\\"\"" pod="openstack-operators/nova-operator-controller-manager-7fd5b6bbc6-5tcmh" podUID="0d140b62-4c85-405b-9eff-8dc02ad9e2ed" Oct 01 15:17:29 crc kubenswrapper[4869]: E1001 15:17:29.570352 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ovn-operator@sha256:1051afc168038fb814f75e7a5f07c588b295a83ebd143dcd8b46d799e31ad302\\\"\"" pod="openstack-operators/ovn-operator-controller-manager-84c745747f-gv26d" podUID="bcc95d48-0c42-425f-97de-90db5f8d02c8" Oct 01 15:17:29 crc kubenswrapper[4869]: I1001 15:17:29.572227 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-cbdf6dc66-n8xn2" event={"ID":"a74ba78b-be87-40e4-a1a1-f59a10612f6c","Type":"ContainerStarted","Data":"c73c4c225544f48d834cb5c560b25dbd1a12d0dfffe68b2a15940950355683fb"} Oct 01 15:17:29 crc kubenswrapper[4869]: E1001 15:17:29.576058 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:f61fdfbfd12027ce6b4e7ad553ec0582f080de0cfb472de6dc04ad3078bb17e3\\\"\"" pod="openstack-operators/test-operator-controller-manager-cbdf6dc66-n8xn2" podUID="a74ba78b-be87-40e4-a1a1-f59a10612f6c" Oct 01 15:17:29 crc kubenswrapper[4869]: E1001 15:17:29.583823 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-6mcvk" podUID="34b12def-835e-430c-9e9a-29f191900a00" Oct 01 15:17:29 crc 
kubenswrapper[4869]: I1001 15:17:29.593705 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-5c8fdc4d5c-pd6wq" event={"ID":"ccda625c-cf12-415b-9a87-dd77a4c0fa1b","Type":"ContainerStarted","Data":"7a8bf1c65f96e0730d6e99e14da45fe6f9842f720c91c54c11c2267bba55e7aa"} Oct 01 15:17:30 crc kubenswrapper[4869]: I1001 15:17:30.597119 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-6c7b6bcb7c-hnlxr" event={"ID":"3f500075-2bfe-440a-856c-976d2404158f","Type":"ContainerStarted","Data":"63416cffec9bc3f56eb31790afdb8b0f94ca99d9ba6674aa69d960ac8307c494"} Oct 01 15:17:30 crc kubenswrapper[4869]: I1001 15:17:30.598292 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-6c7b6bcb7c-hnlxr" Oct 01 15:17:30 crc kubenswrapper[4869]: E1001 15:17:30.598654 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:09c2f519ea218f6038b7be039b8e6ac33ee93b217b9be0d2d18a5e7f94faae06\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-68d7bc5569-9wzmb" podUID="22cc979e-ec88-4ed2-bed9-fe4e685cae46" Oct 01 15:17:30 crc kubenswrapper[4869]: E1001 15:17:30.598765 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ovn-operator@sha256:1051afc168038fb814f75e7a5f07c588b295a83ebd143dcd8b46d799e31ad302\\\"\"" pod="openstack-operators/ovn-operator-controller-manager-84c745747f-gv26d" podUID="bcc95d48-0c42-425f-97de-90db5f8d02c8" Oct 01 15:17:30 crc kubenswrapper[4869]: E1001 15:17:30.598841 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:f61fdfbfd12027ce6b4e7ad553ec0582f080de0cfb472de6dc04ad3078bb17e3\\\"\"" pod="openstack-operators/test-operator-controller-manager-cbdf6dc66-n8xn2" podUID="a74ba78b-be87-40e4-a1a1-f59a10612f6c" Oct 01 15:17:30 crc kubenswrapper[4869]: E1001 15:17:30.599121 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:8fdf377daf05e2fa7346505017078fa81981dd945bf635a64c8022633c68118f\\\"\"" pod="openstack-operators/telemetry-operator-controller-manager-cb66d6b59-r69lh" podUID="4d9e486b-4e16-422f-b594-f6e6bf76c569" Oct 01 15:17:30 crc kubenswrapper[4869]: E1001 15:17:30.600052 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/nova-operator@sha256:a517abc6427ab73fed93b0bd89a6eb52d0311fbfb0c00752f889baf8ffd5068f\\\"\"" pod="openstack-operators/nova-operator-controller-manager-7fd5b6bbc6-5tcmh" podUID="0d140b62-4c85-405b-9eff-8dc02ad9e2ed" Oct 01 15:17:30 crc kubenswrapper[4869]: I1001 15:17:30.667961 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-manager-6c7b6bcb7c-hnlxr" podStartSLOduration=4.667942741 podStartE2EDuration="4.667942741s" 
podCreationTimestamp="2025-10-01 15:17:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:17:30.664302659 +0000 UTC m=+759.811145765" watchObservedRunningTime="2025-10-01 15:17:30.667942741 +0000 UTC m=+759.814785857" Oct 01 15:17:38 crc kubenswrapper[4869]: I1001 15:17:38.678512 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-59d7dc95cf-mqdq6" event={"ID":"e1f0cffe-e44a-432b-bd66-03be980080b2","Type":"ContainerStarted","Data":"ab89d08b756eb0c90fb0948f984c2a8459bbcb8f5dc0ed6c8f31c9fd40b3a51c"} Oct 01 15:17:38 crc kubenswrapper[4869]: I1001 15:17:38.692532 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-679b4759bb-mrrm6" event={"ID":"1f43a837-ea14-4bdb-9b91-ffbd20f1bad3","Type":"ContainerStarted","Data":"c7db617b0884b83dba82160b24cef185c91b24a932bfca3dc124e3288d1f8607"} Oct 01 15:17:38 crc kubenswrapper[4869]: I1001 15:17:38.711784 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-67bf5bb885-fv6pk" event={"ID":"1ee62730-fdac-40bd-b923-d5544be938e1","Type":"ContainerStarted","Data":"d0e17e291a78a869b2c9341debab04a1c759eb0e9e15780c52dc979063750c2a"} Oct 01 15:17:38 crc kubenswrapper[4869]: I1001 15:17:38.711828 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-67bf5bb885-fv6pk" event={"ID":"1ee62730-fdac-40bd-b923-d5544be938e1","Type":"ContainerStarted","Data":"c0c0024ea40cf57e229e458c2b624b7b1af17ae07984dc6d457d5829c431492b"} Oct 01 15:17:38 crc kubenswrapper[4869]: I1001 15:17:38.711955 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-67bf5bb885-fv6pk" Oct 01 15:17:38 crc kubenswrapper[4869]: I1001 15:17:38.719663 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5b4fc86755-g27k4" event={"ID":"a67caefc-004c-4cd3-92b1-191f9531044a","Type":"ContainerStarted","Data":"22327d6a0353b10a1673ca1b9fa69b4d1fcb13519d531a752e5771bfff2a85bf"} Oct 01 15:17:38 crc kubenswrapper[4869]: I1001 15:17:38.725824 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-manager-6c7b6bcb7c-hnlxr" Oct 01 15:17:38 crc kubenswrapper[4869]: I1001 15:17:38.726176 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-5c8fdc4d5c-pd6wq" event={"ID":"ccda625c-cf12-415b-9a87-dd77a4c0fa1b","Type":"ContainerStarted","Data":"7eed9eee134f9b7eee479699951af93e7a53bee31a95c36b09e08614162bf50f"} Oct 01 15:17:38 crc kubenswrapper[4869]: I1001 15:17:38.746561 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-77fb7bcf5b-7pl4z" event={"ID":"c7cb96bc-1269-4c0c-b3f3-1575ee10543e","Type":"ContainerStarted","Data":"36209ea7e6a480d4374d3b73d5ae9b47471994369a71f11e673755735a29e789"} Oct 01 15:17:38 crc kubenswrapper[4869]: I1001 15:17:38.749005 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-5f45cd594f-mh4jm" event={"ID":"d7e4e3d7-4b52-46b2-8097-b00b4de3b87a","Type":"ContainerStarted","Data":"fd93515d73ef48ff79a0f4a969c0df5a51e5ccc491f5d7894a203908fdf2a5b2"} Oct 01 15:17:38 
crc kubenswrapper[4869]: I1001 15:17:38.767670 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-67bf5bb885-fv6pk" podStartSLOduration=2.666246364 podStartE2EDuration="12.767651515s" podCreationTimestamp="2025-10-01 15:17:26 +0000 UTC" firstStartedPulling="2025-10-01 15:17:27.745281463 +0000 UTC m=+756.892124579" lastFinishedPulling="2025-10-01 15:17:37.846686604 +0000 UTC m=+766.993529730" observedRunningTime="2025-10-01 15:17:38.748184853 +0000 UTC m=+767.895027969" watchObservedRunningTime="2025-10-01 15:17:38.767651515 +0000 UTC m=+767.914494631" Oct 01 15:17:38 crc kubenswrapper[4869]: I1001 15:17:38.808496 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-659bb84579nbl75" event={"ID":"14d20f5e-b22a-4c46-8712-f65e973ee387","Type":"ContainerStarted","Data":"11de6a42ac0f44c250e814e5a0a5351d19f60c7d0ce4e581c9d7f546bedb4a47"} Oct 01 15:17:38 crc kubenswrapper[4869]: I1001 15:17:38.847391 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-f7f98cb69-79zfb" event={"ID":"62fc6158-1408-44bd-891b-ef7ead1f5867","Type":"ContainerStarted","Data":"86433e8fe16771b934652ebbb115f913ef8bf3b37fa1bc67fbb6e789520c047c"} Oct 01 15:17:38 crc kubenswrapper[4869]: I1001 15:17:38.847449 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-f7f98cb69-79zfb" event={"ID":"62fc6158-1408-44bd-891b-ef7ead1f5867","Type":"ContainerStarted","Data":"aa5114ba5774285c0abd472ca5fb7de8236a8c93352afc9a5e96a103220099d9"} Oct 01 15:17:38 crc kubenswrapper[4869]: I1001 15:17:38.848381 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-f7f98cb69-79zfb" Oct 01 15:17:38 crc kubenswrapper[4869]: I1001 15:17:38.860509 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-b7cf8cb5f-6clgt" event={"ID":"e51d646d-01fe-48e5-af48-29db1e16c849","Type":"ContainerStarted","Data":"14be6bb1dcea7461b584fcb89f976d9ec019450786ff1bc99a1d90bb5d955314"} Oct 01 15:17:38 crc kubenswrapper[4869]: I1001 15:17:38.879486 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-8bc4775b5-wmx8b" event={"ID":"ee8b5119-5b8d-494f-8864-8f0cf2a10631","Type":"ContainerStarted","Data":"b3c6cb611298e4ffc69b49bdc9d6154911354a6680ad8a0ce35028fa0b154f54"} Oct 01 15:17:38 crc kubenswrapper[4869]: I1001 15:17:38.883198 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-f7f98cb69-79zfb" podStartSLOduration=3.214894415 podStartE2EDuration="13.883182703s" podCreationTimestamp="2025-10-01 15:17:25 +0000 UTC" firstStartedPulling="2025-10-01 15:17:27.132994789 +0000 UTC m=+756.279837905" lastFinishedPulling="2025-10-01 15:17:37.801283077 +0000 UTC m=+766.948126193" observedRunningTime="2025-10-01 15:17:38.879669964 +0000 UTC m=+768.026513100" watchObservedRunningTime="2025-10-01 15:17:38.883182703 +0000 UTC m=+768.030025819" Oct 01 15:17:38 crc kubenswrapper[4869]: I1001 15:17:38.889697 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-859cd486d-s8dwj" 
event={"ID":"3e866523-b046-49e3-88f5-1c657a204a14","Type":"ContainerStarted","Data":"7c504adaad9b6df0a5135580c81657a38a418756093d7aad81bf9af9b92d2067"} Oct 01 15:17:38 crc kubenswrapper[4869]: I1001 15:17:38.903154 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-598c4c8547-lq48f" event={"ID":"c74d5ad2-5385-45ee-af5a-db7c45af2bef","Type":"ContainerStarted","Data":"1ba7de3f4a826c242f1388fef918789a6677e06d963afa5bcfd77ccaf917c388"} Oct 01 15:17:38 crc kubenswrapper[4869]: I1001 15:17:38.921525 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-75f8d67d86-42l4j" event={"ID":"64c75872-eff8-45af-bf14-88b5896489ee","Type":"ContainerStarted","Data":"17828a70bb8b01a05099ac517d779e94f336a2d006302fb66fbb014a5066678c"} Oct 01 15:17:38 crc kubenswrapper[4869]: I1001 15:17:38.933223 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-54fbbfcd44-j6zn8" event={"ID":"cbe9be45-3694-4fe5-ae10-c03fbd176bbc","Type":"ContainerStarted","Data":"7260a92a66f8539310eaefc29357c847772b695538a00dde8fce06dfa591398d"} Oct 01 15:17:38 crc kubenswrapper[4869]: I1001 15:17:38.939025 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-689b4f76c9-7fhd4" event={"ID":"a49df948-6460-4d87-82d4-f65bf570cb7b","Type":"ContainerStarted","Data":"764e3e222248ffaaec8636ae53c4a010ad6453795cdb25d4e42dce9bb37c659a"} Oct 01 15:17:39 crc kubenswrapper[4869]: I1001 15:17:39.945537 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-75f8d67d86-42l4j" event={"ID":"64c75872-eff8-45af-bf14-88b5896489ee","Type":"ContainerStarted","Data":"abb3098f235d0b625417f799cdcc7d8cfb7a2b7508f6c3bc74e31c4102e8ea01"} Oct 01 15:17:39 crc kubenswrapper[4869]: I1001 15:17:39.946234 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-75f8d67d86-42l4j" Oct 01 15:17:39 crc kubenswrapper[4869]: I1001 15:17:39.947815 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-54fbbfcd44-j6zn8" event={"ID":"cbe9be45-3694-4fe5-ae10-c03fbd176bbc","Type":"ContainerStarted","Data":"742c0e3ae827412c57fdbf9cbbe21bc15fb63aee829188f18a2d7c6bc75df967"} Oct 01 15:17:39 crc kubenswrapper[4869]: I1001 15:17:39.948141 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-54fbbfcd44-j6zn8" Oct 01 15:17:39 crc kubenswrapper[4869]: I1001 15:17:39.949974 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-679b4759bb-mrrm6" event={"ID":"1f43a837-ea14-4bdb-9b91-ffbd20f1bad3","Type":"ContainerStarted","Data":"2507224a82022c328b05d0405d9cf59a9a8db1283666608f8de90fbfd5d49505"} Oct 01 15:17:39 crc kubenswrapper[4869]: I1001 15:17:39.950047 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-679b4759bb-mrrm6" Oct 01 15:17:39 crc kubenswrapper[4869]: I1001 15:17:39.951765 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-8bc4775b5-wmx8b" 
event={"ID":"ee8b5119-5b8d-494f-8864-8f0cf2a10631","Type":"ContainerStarted","Data":"07f22b5c9f9edbd64a1b3af98275dab68cc49dea1fa9ad918b7a850ca5b15a16"} Oct 01 15:17:39 crc kubenswrapper[4869]: I1001 15:17:39.951823 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-8bc4775b5-wmx8b" Oct 01 15:17:39 crc kubenswrapper[4869]: I1001 15:17:39.952881 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-598c4c8547-lq48f" event={"ID":"c74d5ad2-5385-45ee-af5a-db7c45af2bef","Type":"ContainerStarted","Data":"e27ce73a677ce832f9cbd6c035fbaf8963d06451e093eb439aa7c74b6c5d3cca"} Oct 01 15:17:39 crc kubenswrapper[4869]: I1001 15:17:39.953027 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-598c4c8547-lq48f" Oct 01 15:17:39 crc kubenswrapper[4869]: I1001 15:17:39.954607 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-77fb7bcf5b-7pl4z" event={"ID":"c7cb96bc-1269-4c0c-b3f3-1575ee10543e","Type":"ContainerStarted","Data":"5c89296bddf5c4587fec234711d608a86950d528fac7c19e0274cf09ac2b7cff"} Oct 01 15:17:39 crc kubenswrapper[4869]: I1001 15:17:39.954722 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-77fb7bcf5b-7pl4z" Oct 01 15:17:39 crc kubenswrapper[4869]: I1001 15:17:39.956066 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-5f45cd594f-mh4jm" event={"ID":"d7e4e3d7-4b52-46b2-8097-b00b4de3b87a","Type":"ContainerStarted","Data":"7815a3fe4c9bf4dce58965d99c37e34642b5d251c66d213be75b0acfe583ca0d"} Oct 01 15:17:39 crc kubenswrapper[4869]: I1001 15:17:39.956154 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-5f45cd594f-mh4jm" Oct 01 15:17:39 crc kubenswrapper[4869]: I1001 15:17:39.958099 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-859cd486d-s8dwj" event={"ID":"3e866523-b046-49e3-88f5-1c657a204a14","Type":"ContainerStarted","Data":"0463fae889641ad4b7e1208bde4b47ffe02b909191a458767fcbe7aa387d62aa"} Oct 01 15:17:39 crc kubenswrapper[4869]: I1001 15:17:39.958189 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-859cd486d-s8dwj" Oct 01 15:17:39 crc kubenswrapper[4869]: I1001 15:17:39.959559 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-b7cf8cb5f-6clgt" event={"ID":"e51d646d-01fe-48e5-af48-29db1e16c849","Type":"ContainerStarted","Data":"5763074a925653e55a426def42954defb06a1fe24188b3548be80fa37f06caca"} Oct 01 15:17:39 crc kubenswrapper[4869]: I1001 15:17:39.959677 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-b7cf8cb5f-6clgt" Oct 01 15:17:39 crc kubenswrapper[4869]: I1001 15:17:39.960949 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5b4fc86755-g27k4" event={"ID":"a67caefc-004c-4cd3-92b1-191f9531044a","Type":"ContainerStarted","Data":"a4ee628376733760e629c9bfbd96f6b64f399d8fa3973f7ca3d79a2b87a80e08"} Oct 01 15:17:39 crc kubenswrapper[4869]: I1001 
15:17:39.961069 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-5b4fc86755-g27k4" Oct 01 15:17:39 crc kubenswrapper[4869]: I1001 15:17:39.962341 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-5c8fdc4d5c-pd6wq" event={"ID":"ccda625c-cf12-415b-9a87-dd77a4c0fa1b","Type":"ContainerStarted","Data":"b7905adb03ecfee6e806d393a7c2bc54d3d7a8123ae25de29e64ae74ae2acaec"} Oct 01 15:17:39 crc kubenswrapper[4869]: I1001 15:17:39.962511 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-5c8fdc4d5c-pd6wq" Oct 01 15:17:39 crc kubenswrapper[4869]: I1001 15:17:39.964148 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-689b4f76c9-7fhd4" event={"ID":"a49df948-6460-4d87-82d4-f65bf570cb7b","Type":"ContainerStarted","Data":"fa8bbaf9fe23cbd94ca09031ddd47e4c0a2d5ac82cbad40740818f903816cbdc"} Oct 01 15:17:39 crc kubenswrapper[4869]: I1001 15:17:39.964232 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-689b4f76c9-7fhd4" Oct 01 15:17:39 crc kubenswrapper[4869]: I1001 15:17:39.965467 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-59d7dc95cf-mqdq6" event={"ID":"e1f0cffe-e44a-432b-bd66-03be980080b2","Type":"ContainerStarted","Data":"1a55c0211a137ab448f78f0e86ea00f5bd00a88477ca34ceb5327168aa3c15a3"} Oct 01 15:17:39 crc kubenswrapper[4869]: I1001 15:17:39.965504 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-59d7dc95cf-mqdq6" Oct 01 15:17:39 crc kubenswrapper[4869]: I1001 15:17:39.967061 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-659bb84579nbl75" event={"ID":"14d20f5e-b22a-4c46-8712-f65e973ee387","Type":"ContainerStarted","Data":"ff0a66516dff22c90a07b83af0391a9b6406171482c75642e21bc10c0f774a53"} Oct 01 15:17:39 crc kubenswrapper[4869]: I1001 15:17:39.978812 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/octavia-operator-controller-manager-75f8d67d86-42l4j" podStartSLOduration=4.288009384 podStartE2EDuration="13.978794904s" podCreationTimestamp="2025-10-01 15:17:26 +0000 UTC" firstStartedPulling="2025-10-01 15:17:28.153008251 +0000 UTC m=+757.299851367" lastFinishedPulling="2025-10-01 15:17:37.843793771 +0000 UTC m=+766.990636887" observedRunningTime="2025-10-01 15:17:39.973611203 +0000 UTC m=+769.120454309" watchObservedRunningTime="2025-10-01 15:17:39.978794904 +0000 UTC m=+769.125638020" Oct 01 15:17:40 crc kubenswrapper[4869]: I1001 15:17:40.003488 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ironic-operator-controller-manager-5f45cd594f-mh4jm" podStartSLOduration=3.742418244 podStartE2EDuration="14.003471367s" podCreationTimestamp="2025-10-01 15:17:26 +0000 UTC" firstStartedPulling="2025-10-01 15:17:27.550863103 +0000 UTC m=+756.697706219" lastFinishedPulling="2025-10-01 15:17:37.811916226 +0000 UTC m=+766.958759342" observedRunningTime="2025-10-01 15:17:39.999541318 +0000 UTC m=+769.146384454" watchObservedRunningTime="2025-10-01 15:17:40.003471367 +0000 UTC m=+769.150314483" Oct 01 15:17:40 crc kubenswrapper[4869]: I1001 
15:17:40.027240 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/heat-operator-controller-manager-5b4fc86755-g27k4" podStartSLOduration=3.922118403 podStartE2EDuration="14.027224357s" podCreationTimestamp="2025-10-01 15:17:26 +0000 UTC" firstStartedPulling="2025-10-01 15:17:27.743045147 +0000 UTC m=+756.889888263" lastFinishedPulling="2025-10-01 15:17:37.848151111 +0000 UTC m=+766.994994217" observedRunningTime="2025-10-01 15:17:40.024919479 +0000 UTC m=+769.171762595" watchObservedRunningTime="2025-10-01 15:17:40.027224357 +0000 UTC m=+769.174067473" Oct 01 15:17:40 crc kubenswrapper[4869]: I1001 15:17:40.044484 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/neutron-operator-controller-manager-54fbbfcd44-j6zn8" podStartSLOduration=4.375104265 podStartE2EDuration="14.044470273s" podCreationTimestamp="2025-10-01 15:17:26 +0000 UTC" firstStartedPulling="2025-10-01 15:17:28.167709533 +0000 UTC m=+757.314552649" lastFinishedPulling="2025-10-01 15:17:37.837075541 +0000 UTC m=+766.983918657" observedRunningTime="2025-10-01 15:17:40.040346779 +0000 UTC m=+769.187189895" watchObservedRunningTime="2025-10-01 15:17:40.044470273 +0000 UTC m=+769.191313379" Oct 01 15:17:40 crc kubenswrapper[4869]: I1001 15:17:40.059552 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/cinder-operator-controller-manager-859cd486d-s8dwj" podStartSLOduration=4.966113134 podStartE2EDuration="15.059533373s" podCreationTimestamp="2025-10-01 15:17:25 +0000 UTC" firstStartedPulling="2025-10-01 15:17:27.728454878 +0000 UTC m=+756.875297984" lastFinishedPulling="2025-10-01 15:17:37.821875107 +0000 UTC m=+766.968718223" observedRunningTime="2025-10-01 15:17:40.059192975 +0000 UTC m=+769.206036091" watchObservedRunningTime="2025-10-01 15:17:40.059533373 +0000 UTC m=+769.206376489" Oct 01 15:17:40 crc kubenswrapper[4869]: I1001 15:17:40.079757 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/placement-operator-controller-manager-598c4c8547-lq48f" podStartSLOduration=4.393227631 podStartE2EDuration="14.079739333s" podCreationTimestamp="2025-10-01 15:17:26 +0000 UTC" firstStartedPulling="2025-10-01 15:17:28.165780524 +0000 UTC m=+757.312623640" lastFinishedPulling="2025-10-01 15:17:37.852292216 +0000 UTC m=+766.999135342" observedRunningTime="2025-10-01 15:17:40.07720494 +0000 UTC m=+769.224048066" watchObservedRunningTime="2025-10-01 15:17:40.079739333 +0000 UTC m=+769.226582449" Oct 01 15:17:40 crc kubenswrapper[4869]: I1001 15:17:40.114468 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-baremetal-operator-controller-manager-659bb84579nbl75" podStartSLOduration=4.391224251 podStartE2EDuration="14.11445295s" podCreationTimestamp="2025-10-01 15:17:26 +0000 UTC" firstStartedPulling="2025-10-01 15:17:28.119190367 +0000 UTC m=+757.266033483" lastFinishedPulling="2025-10-01 15:17:37.842419056 +0000 UTC m=+766.989262182" observedRunningTime="2025-10-01 15:17:40.110809398 +0000 UTC m=+769.257652514" watchObservedRunningTime="2025-10-01 15:17:40.11445295 +0000 UTC m=+769.261296066" Oct 01 15:17:40 crc kubenswrapper[4869]: I1001 15:17:40.126845 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-59d7dc95cf-mqdq6" podStartSLOduration=3.965628332 podStartE2EDuration="14.126836113s" 
podCreationTimestamp="2025-10-01 15:17:26 +0000 UTC" firstStartedPulling="2025-10-01 15:17:27.701523708 +0000 UTC m=+756.848366814" lastFinishedPulling="2025-10-01 15:17:37.862731479 +0000 UTC m=+767.009574595" observedRunningTime="2025-10-01 15:17:40.126120995 +0000 UTC m=+769.272964111" watchObservedRunningTime="2025-10-01 15:17:40.126836113 +0000 UTC m=+769.273679229" Oct 01 15:17:40 crc kubenswrapper[4869]: I1001 15:17:40.140389 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-b7cf8cb5f-6clgt" podStartSLOduration=4.05460764 podStartE2EDuration="14.140365685s" podCreationTimestamp="2025-10-01 15:17:26 +0000 UTC" firstStartedPulling="2025-10-01 15:17:27.736195154 +0000 UTC m=+756.883038270" lastFinishedPulling="2025-10-01 15:17:37.821953199 +0000 UTC m=+766.968796315" observedRunningTime="2025-10-01 15:17:40.139305528 +0000 UTC m=+769.286148644" watchObservedRunningTime="2025-10-01 15:17:40.140365685 +0000 UTC m=+769.287208811" Oct 01 15:17:40 crc kubenswrapper[4869]: I1001 15:17:40.158530 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-679b4759bb-mrrm6" podStartSLOduration=3.723822155 podStartE2EDuration="14.158513853s" podCreationTimestamp="2025-10-01 15:17:26 +0000 UTC" firstStartedPulling="2025-10-01 15:17:27.412003166 +0000 UTC m=+756.558846282" lastFinishedPulling="2025-10-01 15:17:37.846694844 +0000 UTC m=+766.993537980" observedRunningTime="2025-10-01 15:17:40.153562718 +0000 UTC m=+769.300405854" watchObservedRunningTime="2025-10-01 15:17:40.158513853 +0000 UTC m=+769.305356969" Oct 01 15:17:40 crc kubenswrapper[4869]: I1001 15:17:40.169205 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-5c8fdc4d5c-pd6wq" podStartSLOduration=4.893380073 podStartE2EDuration="14.169188323s" podCreationTimestamp="2025-10-01 15:17:26 +0000 UTC" firstStartedPulling="2025-10-01 15:17:28.559937618 +0000 UTC m=+757.706780724" lastFinishedPulling="2025-10-01 15:17:37.835745858 +0000 UTC m=+766.982588974" observedRunningTime="2025-10-01 15:17:40.168212978 +0000 UTC m=+769.315056114" watchObservedRunningTime="2025-10-01 15:17:40.169188323 +0000 UTC m=+769.316031429" Oct 01 15:17:40 crc kubenswrapper[4869]: I1001 15:17:40.189422 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/designate-operator-controller-manager-77fb7bcf5b-7pl4z" podStartSLOduration=4.897227484 podStartE2EDuration="15.189396333s" podCreationTimestamp="2025-10-01 15:17:25 +0000 UTC" firstStartedPulling="2025-10-01 15:17:27.550803161 +0000 UTC m=+756.697646277" lastFinishedPulling="2025-10-01 15:17:37.84297202 +0000 UTC m=+766.989815126" observedRunningTime="2025-10-01 15:17:40.187885475 +0000 UTC m=+769.334728611" watchObservedRunningTime="2025-10-01 15:17:40.189396333 +0000 UTC m=+769.336239449" Oct 01 15:17:40 crc kubenswrapper[4869]: I1001 15:17:40.205998 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-8bc4775b5-wmx8b" podStartSLOduration=3.6098516959999998 podStartE2EDuration="14.205981182s" podCreationTimestamp="2025-10-01 15:17:26 +0000 UTC" firstStartedPulling="2025-10-01 15:17:27.240428712 +0000 UTC m=+756.387271828" lastFinishedPulling="2025-10-01 15:17:37.836558198 +0000 UTC m=+766.983401314" observedRunningTime="2025-10-01 15:17:40.202319309 +0000 
UTC m=+769.349162435" watchObservedRunningTime="2025-10-01 15:17:40.205981182 +0000 UTC m=+769.352824298" Oct 01 15:17:40 crc kubenswrapper[4869]: I1001 15:17:40.219596 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-689b4f76c9-7fhd4" podStartSLOduration=4.536237924 podStartE2EDuration="14.219577435s" podCreationTimestamp="2025-10-01 15:17:26 +0000 UTC" firstStartedPulling="2025-10-01 15:17:28.167543059 +0000 UTC m=+757.314386175" lastFinishedPulling="2025-10-01 15:17:37.85088256 +0000 UTC m=+766.997725686" observedRunningTime="2025-10-01 15:17:40.21578795 +0000 UTC m=+769.362631066" watchObservedRunningTime="2025-10-01 15:17:40.219577435 +0000 UTC m=+769.366420551" Oct 01 15:17:40 crc kubenswrapper[4869]: I1001 15:17:40.979643 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-659bb84579nbl75" Oct 01 15:17:46 crc kubenswrapper[4869]: I1001 15:17:46.323056 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-f7f98cb69-79zfb" Oct 01 15:17:46 crc kubenswrapper[4869]: I1001 15:17:46.384285 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-8bc4775b5-wmx8b" Oct 01 15:17:46 crc kubenswrapper[4869]: I1001 15:17:46.491626 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-679b4759bb-mrrm6" Oct 01 15:17:46 crc kubenswrapper[4869]: I1001 15:17:46.561972 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ironic-operator-controller-manager-5f45cd594f-mh4jm" Oct 01 15:17:46 crc kubenswrapper[4869]: I1001 15:17:46.636823 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/cinder-operator-controller-manager-859cd486d-s8dwj" Oct 01 15:17:46 crc kubenswrapper[4869]: I1001 15:17:46.673655 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-77fb7bcf5b-7pl4z" Oct 01 15:17:46 crc kubenswrapper[4869]: I1001 15:17:46.722237 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/heat-operator-controller-manager-5b4fc86755-g27k4" Oct 01 15:17:46 crc kubenswrapper[4869]: I1001 15:17:46.737390 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/manila-operator-controller-manager-b7cf8cb5f-6clgt" Oct 01 15:17:46 crc kubenswrapper[4869]: I1001 15:17:46.773732 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-67bf5bb885-fv6pk" Oct 01 15:17:46 crc kubenswrapper[4869]: I1001 15:17:46.805965 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/neutron-operator-controller-manager-54fbbfcd44-j6zn8" Oct 01 15:17:46 crc kubenswrapper[4869]: I1001 15:17:46.848543 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-59d7dc95cf-mqdq6" Oct 01 15:17:46 crc kubenswrapper[4869]: I1001 15:17:46.939727 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openstack-operators/octavia-operator-controller-manager-75f8d67d86-42l4j" Oct 01 15:17:47 crc kubenswrapper[4869]: I1001 15:17:47.010334 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/placement-operator-controller-manager-598c4c8547-lq48f" Oct 01 15:17:47 crc kubenswrapper[4869]: I1001 15:17:47.090694 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-689b4f76c9-7fhd4" Oct 01 15:17:47 crc kubenswrapper[4869]: I1001 15:17:47.277252 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-baremetal-operator-controller-manager-659bb84579nbl75" Oct 01 15:17:48 crc kubenswrapper[4869]: I1001 15:17:48.033828 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-5c8fdc4d5c-pd6wq" Oct 01 15:17:51 crc kubenswrapper[4869]: I1001 15:17:51.085949 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-cbdf6dc66-n8xn2" event={"ID":"a74ba78b-be87-40e4-a1a1-f59a10612f6c","Type":"ContainerStarted","Data":"e2593693f67975a92dcd05606c0ffeb91344e76a150930ad2763aeda80e3eae0"} Oct 01 15:17:51 crc kubenswrapper[4869]: I1001 15:17:51.087467 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-cbdf6dc66-n8xn2" Oct 01 15:17:51 crc kubenswrapper[4869]: I1001 15:17:51.092356 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-cb66d6b59-r69lh" event={"ID":"4d9e486b-4e16-422f-b594-f6e6bf76c569","Type":"ContainerStarted","Data":"36053253a6bf9786c335bbd1a70b80ac58c686857bbff9c6f9b836c7617859c2"} Oct 01 15:17:51 crc kubenswrapper[4869]: I1001 15:17:51.092697 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-cb66d6b59-r69lh" Oct 01 15:17:51 crc kubenswrapper[4869]: I1001 15:17:51.094626 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-68d7bc5569-9wzmb" event={"ID":"22cc979e-ec88-4ed2-bed9-fe4e685cae46","Type":"ContainerStarted","Data":"9151fc1628a03895acf9553a2b46e81e0c2d835735f78a0bfb5a9cb837079eb6"} Oct 01 15:17:51 crc kubenswrapper[4869]: I1001 15:17:51.095250 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-68d7bc5569-9wzmb" Oct 01 15:17:51 crc kubenswrapper[4869]: I1001 15:17:51.100880 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-7fd5b6bbc6-5tcmh" event={"ID":"0d140b62-4c85-405b-9eff-8dc02ad9e2ed","Type":"ContainerStarted","Data":"d55efa28114ee2abf7c3a8adc0bd8519abe9ac0dcb6f7b8a9567fef440d6c801"} Oct 01 15:17:51 crc kubenswrapper[4869]: I1001 15:17:51.101095 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-7fd5b6bbc6-5tcmh" Oct 01 15:17:51 crc kubenswrapper[4869]: I1001 15:17:51.102281 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-6mcvk" event={"ID":"34b12def-835e-430c-9e9a-29f191900a00","Type":"ContainerStarted","Data":"ab7b3b411fb17056d881772fc291555923bb885bfd661c13987405f24e357cec"} Oct 01 15:17:51 crc 
kubenswrapper[4869]: I1001 15:17:51.104188 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-84c745747f-gv26d" event={"ID":"bcc95d48-0c42-425f-97de-90db5f8d02c8","Type":"ContainerStarted","Data":"db06ce60ec9644d15ecb87c66274a2d6f374f92e497c5d34f1b1ecc5f216b0a2"} Oct 01 15:17:51 crc kubenswrapper[4869]: I1001 15:17:51.104621 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-84c745747f-gv26d" Oct 01 15:17:51 crc kubenswrapper[4869]: I1001 15:17:51.145214 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/test-operator-controller-manager-cbdf6dc66-n8xn2" podStartSLOduration=2.908340158 podStartE2EDuration="25.145201003s" podCreationTimestamp="2025-10-01 15:17:26 +0000 UTC" firstStartedPulling="2025-10-01 15:17:28.202730206 +0000 UTC m=+757.349573322" lastFinishedPulling="2025-10-01 15:17:50.439591051 +0000 UTC m=+779.586434167" observedRunningTime="2025-10-01 15:17:51.127303001 +0000 UTC m=+780.274146127" watchObservedRunningTime="2025-10-01 15:17:51.145201003 +0000 UTC m=+780.292044119" Oct 01 15:17:51 crc kubenswrapper[4869]: I1001 15:17:51.147230 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-68d7bc5569-9wzmb" podStartSLOduration=2.87634612 podStartE2EDuration="25.147225304s" podCreationTimestamp="2025-10-01 15:17:26 +0000 UTC" firstStartedPulling="2025-10-01 15:17:28.205069915 +0000 UTC m=+757.351913031" lastFinishedPulling="2025-10-01 15:17:50.475949099 +0000 UTC m=+779.622792215" observedRunningTime="2025-10-01 15:17:51.144513725 +0000 UTC m=+780.291356841" watchObservedRunningTime="2025-10-01 15:17:51.147225304 +0000 UTC m=+780.294068420" Oct 01 15:17:51 crc kubenswrapper[4869]: I1001 15:17:51.168785 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-controller-manager-7fd5b6bbc6-5tcmh" podStartSLOduration=2.954645758 podStartE2EDuration="25.168764618s" podCreationTimestamp="2025-10-01 15:17:26 +0000 UTC" firstStartedPulling="2025-10-01 15:17:28.214200676 +0000 UTC m=+757.361043792" lastFinishedPulling="2025-10-01 15:17:50.428319536 +0000 UTC m=+779.575162652" observedRunningTime="2025-10-01 15:17:51.164853329 +0000 UTC m=+780.311696445" watchObservedRunningTime="2025-10-01 15:17:51.168764618 +0000 UTC m=+780.315607744" Oct 01 15:17:51 crc kubenswrapper[4869]: I1001 15:17:51.198811 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-6mcvk" podStartSLOduration=2.936495529 podStartE2EDuration="25.198794416s" podCreationTimestamp="2025-10-01 15:17:26 +0000 UTC" firstStartedPulling="2025-10-01 15:17:28.216016852 +0000 UTC m=+757.362859968" lastFinishedPulling="2025-10-01 15:17:50.478315739 +0000 UTC m=+779.625158855" observedRunningTime="2025-10-01 15:17:51.196848297 +0000 UTC m=+780.343691423" watchObservedRunningTime="2025-10-01 15:17:51.198794416 +0000 UTC m=+780.345637532" Oct 01 15:17:51 crc kubenswrapper[4869]: I1001 15:17:51.202161 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/telemetry-operator-controller-manager-cb66d6b59-r69lh" podStartSLOduration=2.977154366 podStartE2EDuration="25.202153061s" podCreationTimestamp="2025-10-01 15:17:26 +0000 UTC" firstStartedPulling="2025-10-01 15:17:28.21395588 +0000 UTC 
m=+757.360798986" lastFinishedPulling="2025-10-01 15:17:50.438954525 +0000 UTC m=+779.585797681" observedRunningTime="2025-10-01 15:17:51.181247823 +0000 UTC m=+780.328090939" watchObservedRunningTime="2025-10-01 15:17:51.202153061 +0000 UTC m=+780.348996177" Oct 01 15:17:51 crc kubenswrapper[4869]: I1001 15:17:51.214389 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ovn-operator-controller-manager-84c745747f-gv26d" podStartSLOduration=2.948022651 podStartE2EDuration="25.21437427s" podCreationTimestamp="2025-10-01 15:17:26 +0000 UTC" firstStartedPulling="2025-10-01 15:17:28.229723528 +0000 UTC m=+757.376566644" lastFinishedPulling="2025-10-01 15:17:50.496075157 +0000 UTC m=+779.642918263" observedRunningTime="2025-10-01 15:17:51.211133158 +0000 UTC m=+780.357976294" watchObservedRunningTime="2025-10-01 15:17:51.21437427 +0000 UTC m=+780.361217386" Oct 01 15:17:56 crc kubenswrapper[4869]: I1001 15:17:56.885806 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-7fd5b6bbc6-5tcmh" Oct 01 15:17:56 crc kubenswrapper[4869]: I1001 15:17:56.992958 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ovn-operator-controller-manager-84c745747f-gv26d" Oct 01 15:17:57 crc kubenswrapper[4869]: I1001 15:17:57.041939 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-cb66d6b59-r69lh" Oct 01 15:17:57 crc kubenswrapper[4869]: I1001 15:17:57.121864 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/test-operator-controller-manager-cbdf6dc66-n8xn2" Oct 01 15:17:57 crc kubenswrapper[4869]: I1001 15:17:57.151115 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-68d7bc5569-9wzmb" Oct 01 15:18:05 crc kubenswrapper[4869]: I1001 15:18:05.101815 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-86gjq"] Oct 01 15:18:05 crc kubenswrapper[4869]: I1001 15:18:05.104449 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-86gjq" Oct 01 15:18:05 crc kubenswrapper[4869]: I1001 15:18:05.124303 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-86gjq"] Oct 01 15:18:05 crc kubenswrapper[4869]: I1001 15:18:05.239408 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4mgnz\" (UniqueName: \"kubernetes.io/projected/152f2134-8a6e-4400-a2b5-978fe7d9f23f-kube-api-access-4mgnz\") pod \"redhat-operators-86gjq\" (UID: \"152f2134-8a6e-4400-a2b5-978fe7d9f23f\") " pod="openshift-marketplace/redhat-operators-86gjq" Oct 01 15:18:05 crc kubenswrapper[4869]: I1001 15:18:05.239478 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/152f2134-8a6e-4400-a2b5-978fe7d9f23f-utilities\") pod \"redhat-operators-86gjq\" (UID: \"152f2134-8a6e-4400-a2b5-978fe7d9f23f\") " pod="openshift-marketplace/redhat-operators-86gjq" Oct 01 15:18:05 crc kubenswrapper[4869]: I1001 15:18:05.239604 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/152f2134-8a6e-4400-a2b5-978fe7d9f23f-catalog-content\") pod \"redhat-operators-86gjq\" (UID: \"152f2134-8a6e-4400-a2b5-978fe7d9f23f\") " pod="openshift-marketplace/redhat-operators-86gjq" Oct 01 15:18:05 crc kubenswrapper[4869]: I1001 15:18:05.341014 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/152f2134-8a6e-4400-a2b5-978fe7d9f23f-catalog-content\") pod \"redhat-operators-86gjq\" (UID: \"152f2134-8a6e-4400-a2b5-978fe7d9f23f\") " pod="openshift-marketplace/redhat-operators-86gjq" Oct 01 15:18:05 crc kubenswrapper[4869]: I1001 15:18:05.341148 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4mgnz\" (UniqueName: \"kubernetes.io/projected/152f2134-8a6e-4400-a2b5-978fe7d9f23f-kube-api-access-4mgnz\") pod \"redhat-operators-86gjq\" (UID: \"152f2134-8a6e-4400-a2b5-978fe7d9f23f\") " pod="openshift-marketplace/redhat-operators-86gjq" Oct 01 15:18:05 crc kubenswrapper[4869]: I1001 15:18:05.341186 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/152f2134-8a6e-4400-a2b5-978fe7d9f23f-utilities\") pod \"redhat-operators-86gjq\" (UID: \"152f2134-8a6e-4400-a2b5-978fe7d9f23f\") " pod="openshift-marketplace/redhat-operators-86gjq" Oct 01 15:18:05 crc kubenswrapper[4869]: I1001 15:18:05.341886 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/152f2134-8a6e-4400-a2b5-978fe7d9f23f-utilities\") pod \"redhat-operators-86gjq\" (UID: \"152f2134-8a6e-4400-a2b5-978fe7d9f23f\") " pod="openshift-marketplace/redhat-operators-86gjq" Oct 01 15:18:05 crc kubenswrapper[4869]: I1001 15:18:05.341901 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/152f2134-8a6e-4400-a2b5-978fe7d9f23f-catalog-content\") pod \"redhat-operators-86gjq\" (UID: \"152f2134-8a6e-4400-a2b5-978fe7d9f23f\") " pod="openshift-marketplace/redhat-operators-86gjq" Oct 01 15:18:05 crc kubenswrapper[4869]: I1001 15:18:05.365556 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-4mgnz\" (UniqueName: \"kubernetes.io/projected/152f2134-8a6e-4400-a2b5-978fe7d9f23f-kube-api-access-4mgnz\") pod \"redhat-operators-86gjq\" (UID: \"152f2134-8a6e-4400-a2b5-978fe7d9f23f\") " pod="openshift-marketplace/redhat-operators-86gjq" Oct 01 15:18:05 crc kubenswrapper[4869]: I1001 15:18:05.435912 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-86gjq" Oct 01 15:18:05 crc kubenswrapper[4869]: I1001 15:18:05.726630 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-86gjq"] Oct 01 15:18:06 crc kubenswrapper[4869]: I1001 15:18:06.243017 4869 generic.go:334] "Generic (PLEG): container finished" podID="152f2134-8a6e-4400-a2b5-978fe7d9f23f" containerID="672e434343a8faff365258ee6182d46dc58de53550c85c4d75884eda9034bf0d" exitCode=0 Oct 01 15:18:06 crc kubenswrapper[4869]: I1001 15:18:06.243075 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-86gjq" event={"ID":"152f2134-8a6e-4400-a2b5-978fe7d9f23f","Type":"ContainerDied","Data":"672e434343a8faff365258ee6182d46dc58de53550c85c4d75884eda9034bf0d"} Oct 01 15:18:06 crc kubenswrapper[4869]: I1001 15:18:06.243099 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-86gjq" event={"ID":"152f2134-8a6e-4400-a2b5-978fe7d9f23f","Type":"ContainerStarted","Data":"6cea9cea2f7fa44af50f1eaf58c6d9a07eb7f533a8b2c65ed89e73a14c1eeacb"} Oct 01 15:18:06 crc kubenswrapper[4869]: I1001 15:18:06.245216 4869 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 01 15:18:07 crc kubenswrapper[4869]: I1001 15:18:07.251557 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-86gjq" event={"ID":"152f2134-8a6e-4400-a2b5-978fe7d9f23f","Type":"ContainerStarted","Data":"944696ea634e12b6d7306604867e86d3499319908ef87d4752188f7c73fbb81e"} Oct 01 15:18:08 crc kubenswrapper[4869]: I1001 15:18:08.265230 4869 generic.go:334] "Generic (PLEG): container finished" podID="152f2134-8a6e-4400-a2b5-978fe7d9f23f" containerID="944696ea634e12b6d7306604867e86d3499319908ef87d4752188f7c73fbb81e" exitCode=0 Oct 01 15:18:08 crc kubenswrapper[4869]: I1001 15:18:08.265331 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-86gjq" event={"ID":"152f2134-8a6e-4400-a2b5-978fe7d9f23f","Type":"ContainerDied","Data":"944696ea634e12b6d7306604867e86d3499319908ef87d4752188f7c73fbb81e"} Oct 01 15:18:10 crc kubenswrapper[4869]: I1001 15:18:10.283714 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-86gjq" event={"ID":"152f2134-8a6e-4400-a2b5-978fe7d9f23f","Type":"ContainerStarted","Data":"1cc1902e10650a448496aced3377102121f56ecf7073ef579b067e946451ca25"} Oct 01 15:18:10 crc kubenswrapper[4869]: I1001 15:18:10.315134 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-86gjq" podStartSLOduration=2.428754756 podStartE2EDuration="5.315108936s" podCreationTimestamp="2025-10-01 15:18:05 +0000 UTC" firstStartedPulling="2025-10-01 15:18:06.244975607 +0000 UTC m=+795.391818723" lastFinishedPulling="2025-10-01 15:18:09.131329787 +0000 UTC m=+798.278172903" observedRunningTime="2025-10-01 15:18:10.305580835 +0000 UTC m=+799.452423951" watchObservedRunningTime="2025-10-01 15:18:10.315108936 +0000 UTC m=+799.461952092" Oct 01 15:18:15 crc 
kubenswrapper[4869]: I1001 15:18:15.286537 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-b8b69cf79-z8c6l"] Oct 01 15:18:15 crc kubenswrapper[4869]: I1001 15:18:15.289307 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b8b69cf79-z8c6l" Oct 01 15:18:15 crc kubenswrapper[4869]: I1001 15:18:15.291240 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-cm7ps" Oct 01 15:18:15 crc kubenswrapper[4869]: I1001 15:18:15.292239 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt" Oct 01 15:18:15 crc kubenswrapper[4869]: I1001 15:18:15.292512 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns" Oct 01 15:18:15 crc kubenswrapper[4869]: I1001 15:18:15.292726 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-b8b69cf79-z8c6l"] Oct 01 15:18:15 crc kubenswrapper[4869]: I1001 15:18:15.295627 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt" Oct 01 15:18:15 crc kubenswrapper[4869]: I1001 15:18:15.364231 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-d5f6f49c7-dqwqq"] Oct 01 15:18:15 crc kubenswrapper[4869]: I1001 15:18:15.366408 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-d5f6f49c7-dqwqq" Oct 01 15:18:15 crc kubenswrapper[4869]: I1001 15:18:15.368328 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc" Oct 01 15:18:15 crc kubenswrapper[4869]: I1001 15:18:15.382952 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-d5f6f49c7-dqwqq"] Oct 01 15:18:15 crc kubenswrapper[4869]: I1001 15:18:15.392392 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e90c4d77-8795-43ca-8c46-a3d86440cde6-config\") pod \"dnsmasq-dns-b8b69cf79-z8c6l\" (UID: \"e90c4d77-8795-43ca-8c46-a3d86440cde6\") " pod="openstack/dnsmasq-dns-b8b69cf79-z8c6l" Oct 01 15:18:15 crc kubenswrapper[4869]: I1001 15:18:15.392457 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tkzln\" (UniqueName: \"kubernetes.io/projected/e90c4d77-8795-43ca-8c46-a3d86440cde6-kube-api-access-tkzln\") pod \"dnsmasq-dns-b8b69cf79-z8c6l\" (UID: \"e90c4d77-8795-43ca-8c46-a3d86440cde6\") " pod="openstack/dnsmasq-dns-b8b69cf79-z8c6l" Oct 01 15:18:15 crc kubenswrapper[4869]: I1001 15:18:15.436906 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-86gjq" Oct 01 15:18:15 crc kubenswrapper[4869]: I1001 15:18:15.438509 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-86gjq" Oct 01 15:18:15 crc kubenswrapper[4869]: I1001 15:18:15.487941 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-86gjq" Oct 01 15:18:15 crc kubenswrapper[4869]: I1001 15:18:15.493772 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e90c4d77-8795-43ca-8c46-a3d86440cde6-config\") pod \"dnsmasq-dns-b8b69cf79-z8c6l\" (UID: \"e90c4d77-8795-43ca-8c46-a3d86440cde6\") " pod="openstack/dnsmasq-dns-b8b69cf79-z8c6l" Oct 01 
15:18:15 crc kubenswrapper[4869]: I1001 15:18:15.493839 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tkzln\" (UniqueName: \"kubernetes.io/projected/e90c4d77-8795-43ca-8c46-a3d86440cde6-kube-api-access-tkzln\") pod \"dnsmasq-dns-b8b69cf79-z8c6l\" (UID: \"e90c4d77-8795-43ca-8c46-a3d86440cde6\") " pod="openstack/dnsmasq-dns-b8b69cf79-z8c6l" Oct 01 15:18:15 crc kubenswrapper[4869]: I1001 15:18:15.493868 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qz84c\" (UniqueName: \"kubernetes.io/projected/0cdcbca4-2cf3-4cb7-a859-82c4e39d2f7d-kube-api-access-qz84c\") pod \"dnsmasq-dns-d5f6f49c7-dqwqq\" (UID: \"0cdcbca4-2cf3-4cb7-a859-82c4e39d2f7d\") " pod="openstack/dnsmasq-dns-d5f6f49c7-dqwqq" Oct 01 15:18:15 crc kubenswrapper[4869]: I1001 15:18:15.493898 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0cdcbca4-2cf3-4cb7-a859-82c4e39d2f7d-dns-svc\") pod \"dnsmasq-dns-d5f6f49c7-dqwqq\" (UID: \"0cdcbca4-2cf3-4cb7-a859-82c4e39d2f7d\") " pod="openstack/dnsmasq-dns-d5f6f49c7-dqwqq" Oct 01 15:18:15 crc kubenswrapper[4869]: I1001 15:18:15.493922 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0cdcbca4-2cf3-4cb7-a859-82c4e39d2f7d-config\") pod \"dnsmasq-dns-d5f6f49c7-dqwqq\" (UID: \"0cdcbca4-2cf3-4cb7-a859-82c4e39d2f7d\") " pod="openstack/dnsmasq-dns-d5f6f49c7-dqwqq" Oct 01 15:18:15 crc kubenswrapper[4869]: I1001 15:18:15.494903 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e90c4d77-8795-43ca-8c46-a3d86440cde6-config\") pod \"dnsmasq-dns-b8b69cf79-z8c6l\" (UID: \"e90c4d77-8795-43ca-8c46-a3d86440cde6\") " pod="openstack/dnsmasq-dns-b8b69cf79-z8c6l" Oct 01 15:18:15 crc kubenswrapper[4869]: I1001 15:18:15.524155 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tkzln\" (UniqueName: \"kubernetes.io/projected/e90c4d77-8795-43ca-8c46-a3d86440cde6-kube-api-access-tkzln\") pod \"dnsmasq-dns-b8b69cf79-z8c6l\" (UID: \"e90c4d77-8795-43ca-8c46-a3d86440cde6\") " pod="openstack/dnsmasq-dns-b8b69cf79-z8c6l" Oct 01 15:18:15 crc kubenswrapper[4869]: I1001 15:18:15.595823 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qz84c\" (UniqueName: \"kubernetes.io/projected/0cdcbca4-2cf3-4cb7-a859-82c4e39d2f7d-kube-api-access-qz84c\") pod \"dnsmasq-dns-d5f6f49c7-dqwqq\" (UID: \"0cdcbca4-2cf3-4cb7-a859-82c4e39d2f7d\") " pod="openstack/dnsmasq-dns-d5f6f49c7-dqwqq" Oct 01 15:18:15 crc kubenswrapper[4869]: I1001 15:18:15.595886 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0cdcbca4-2cf3-4cb7-a859-82c4e39d2f7d-dns-svc\") pod \"dnsmasq-dns-d5f6f49c7-dqwqq\" (UID: \"0cdcbca4-2cf3-4cb7-a859-82c4e39d2f7d\") " pod="openstack/dnsmasq-dns-d5f6f49c7-dqwqq" Oct 01 15:18:15 crc kubenswrapper[4869]: I1001 15:18:15.595916 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0cdcbca4-2cf3-4cb7-a859-82c4e39d2f7d-config\") pod \"dnsmasq-dns-d5f6f49c7-dqwqq\" (UID: \"0cdcbca4-2cf3-4cb7-a859-82c4e39d2f7d\") " pod="openstack/dnsmasq-dns-d5f6f49c7-dqwqq" Oct 01 15:18:15 crc kubenswrapper[4869]: I1001 
15:18:15.596809 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0cdcbca4-2cf3-4cb7-a859-82c4e39d2f7d-dns-svc\") pod \"dnsmasq-dns-d5f6f49c7-dqwqq\" (UID: \"0cdcbca4-2cf3-4cb7-a859-82c4e39d2f7d\") " pod="openstack/dnsmasq-dns-d5f6f49c7-dqwqq" Oct 01 15:18:15 crc kubenswrapper[4869]: I1001 15:18:15.597189 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0cdcbca4-2cf3-4cb7-a859-82c4e39d2f7d-config\") pod \"dnsmasq-dns-d5f6f49c7-dqwqq\" (UID: \"0cdcbca4-2cf3-4cb7-a859-82c4e39d2f7d\") " pod="openstack/dnsmasq-dns-d5f6f49c7-dqwqq" Oct 01 15:18:15 crc kubenswrapper[4869]: I1001 15:18:15.602757 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b8b69cf79-z8c6l" Oct 01 15:18:15 crc kubenswrapper[4869]: I1001 15:18:15.619142 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qz84c\" (UniqueName: \"kubernetes.io/projected/0cdcbca4-2cf3-4cb7-a859-82c4e39d2f7d-kube-api-access-qz84c\") pod \"dnsmasq-dns-d5f6f49c7-dqwqq\" (UID: \"0cdcbca4-2cf3-4cb7-a859-82c4e39d2f7d\") " pod="openstack/dnsmasq-dns-d5f6f49c7-dqwqq" Oct 01 15:18:15 crc kubenswrapper[4869]: I1001 15:18:15.683387 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-d5f6f49c7-dqwqq" Oct 01 15:18:16 crc kubenswrapper[4869]: I1001 15:18:16.071946 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-b8b69cf79-z8c6l"] Oct 01 15:18:16 crc kubenswrapper[4869]: I1001 15:18:16.136462 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-d5f6f49c7-dqwqq"] Oct 01 15:18:16 crc kubenswrapper[4869]: I1001 15:18:16.334026 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-d5f6f49c7-dqwqq" event={"ID":"0cdcbca4-2cf3-4cb7-a859-82c4e39d2f7d","Type":"ContainerStarted","Data":"18d82e31aa98d64c33da199a271fdc69c25257e8b4263d748102f87269e913a4"} Oct 01 15:18:16 crc kubenswrapper[4869]: I1001 15:18:16.335673 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8b69cf79-z8c6l" event={"ID":"e90c4d77-8795-43ca-8c46-a3d86440cde6","Type":"ContainerStarted","Data":"45a3f2e2797c54276b2b267a2aaa4240f927b367c067090a33adab2a277413b8"} Oct 01 15:18:16 crc kubenswrapper[4869]: I1001 15:18:16.396526 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-86gjq" Oct 01 15:18:16 crc kubenswrapper[4869]: I1001 15:18:16.722380 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-86gjq"] Oct 01 15:18:17 crc kubenswrapper[4869]: I1001 15:18:17.137756 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-b8b69cf79-z8c6l"] Oct 01 15:18:17 crc kubenswrapper[4869]: I1001 15:18:17.150278 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-b6f94bdfc-k4rnx"] Oct 01 15:18:17 crc kubenswrapper[4869]: I1001 15:18:17.151612 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-b6f94bdfc-k4rnx" Oct 01 15:18:17 crc kubenswrapper[4869]: I1001 15:18:17.171147 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-b6f94bdfc-k4rnx"] Oct 01 15:18:17 crc kubenswrapper[4869]: I1001 15:18:17.324752 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/99606142-049c-4f0a-813f-b7274041ec9e-config\") pod \"dnsmasq-dns-b6f94bdfc-k4rnx\" (UID: \"99606142-049c-4f0a-813f-b7274041ec9e\") " pod="openstack/dnsmasq-dns-b6f94bdfc-k4rnx" Oct 01 15:18:17 crc kubenswrapper[4869]: I1001 15:18:17.324838 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qjgzs\" (UniqueName: \"kubernetes.io/projected/99606142-049c-4f0a-813f-b7274041ec9e-kube-api-access-qjgzs\") pod \"dnsmasq-dns-b6f94bdfc-k4rnx\" (UID: \"99606142-049c-4f0a-813f-b7274041ec9e\") " pod="openstack/dnsmasq-dns-b6f94bdfc-k4rnx" Oct 01 15:18:17 crc kubenswrapper[4869]: I1001 15:18:17.324859 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/99606142-049c-4f0a-813f-b7274041ec9e-dns-svc\") pod \"dnsmasq-dns-b6f94bdfc-k4rnx\" (UID: \"99606142-049c-4f0a-813f-b7274041ec9e\") " pod="openstack/dnsmasq-dns-b6f94bdfc-k4rnx" Oct 01 15:18:17 crc kubenswrapper[4869]: I1001 15:18:17.380487 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-d5f6f49c7-dqwqq"] Oct 01 15:18:17 crc kubenswrapper[4869]: I1001 15:18:17.387667 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-77795d58f5-px8hm"] Oct 01 15:18:17 crc kubenswrapper[4869]: I1001 15:18:17.388731 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-77795d58f5-px8hm" Oct 01 15:18:17 crc kubenswrapper[4869]: I1001 15:18:17.410376 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-77795d58f5-px8hm"] Oct 01 15:18:17 crc kubenswrapper[4869]: I1001 15:18:17.428948 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/99606142-049c-4f0a-813f-b7274041ec9e-dns-svc\") pod \"dnsmasq-dns-b6f94bdfc-k4rnx\" (UID: \"99606142-049c-4f0a-813f-b7274041ec9e\") " pod="openstack/dnsmasq-dns-b6f94bdfc-k4rnx" Oct 01 15:18:17 crc kubenswrapper[4869]: I1001 15:18:17.428986 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qjgzs\" (UniqueName: \"kubernetes.io/projected/99606142-049c-4f0a-813f-b7274041ec9e-kube-api-access-qjgzs\") pod \"dnsmasq-dns-b6f94bdfc-k4rnx\" (UID: \"99606142-049c-4f0a-813f-b7274041ec9e\") " pod="openstack/dnsmasq-dns-b6f94bdfc-k4rnx" Oct 01 15:18:17 crc kubenswrapper[4869]: I1001 15:18:17.429052 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/99606142-049c-4f0a-813f-b7274041ec9e-config\") pod \"dnsmasq-dns-b6f94bdfc-k4rnx\" (UID: \"99606142-049c-4f0a-813f-b7274041ec9e\") " pod="openstack/dnsmasq-dns-b6f94bdfc-k4rnx" Oct 01 15:18:17 crc kubenswrapper[4869]: I1001 15:18:17.429829 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/99606142-049c-4f0a-813f-b7274041ec9e-config\") pod \"dnsmasq-dns-b6f94bdfc-k4rnx\" (UID: \"99606142-049c-4f0a-813f-b7274041ec9e\") " pod="openstack/dnsmasq-dns-b6f94bdfc-k4rnx" Oct 01 15:18:17 crc kubenswrapper[4869]: I1001 15:18:17.429957 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/99606142-049c-4f0a-813f-b7274041ec9e-dns-svc\") pod \"dnsmasq-dns-b6f94bdfc-k4rnx\" (UID: \"99606142-049c-4f0a-813f-b7274041ec9e\") " pod="openstack/dnsmasq-dns-b6f94bdfc-k4rnx" Oct 01 15:18:17 crc kubenswrapper[4869]: I1001 15:18:17.469974 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qjgzs\" (UniqueName: \"kubernetes.io/projected/99606142-049c-4f0a-813f-b7274041ec9e-kube-api-access-qjgzs\") pod \"dnsmasq-dns-b6f94bdfc-k4rnx\" (UID: \"99606142-049c-4f0a-813f-b7274041ec9e\") " pod="openstack/dnsmasq-dns-b6f94bdfc-k4rnx" Oct 01 15:18:17 crc kubenswrapper[4869]: I1001 15:18:17.483135 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-b6f94bdfc-k4rnx" Oct 01 15:18:17 crc kubenswrapper[4869]: I1001 15:18:17.531103 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/13f951e9-b94f-41aa-9547-59e5a0eff174-config\") pod \"dnsmasq-dns-77795d58f5-px8hm\" (UID: \"13f951e9-b94f-41aa-9547-59e5a0eff174\") " pod="openstack/dnsmasq-dns-77795d58f5-px8hm" Oct 01 15:18:17 crc kubenswrapper[4869]: I1001 15:18:17.531186 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/13f951e9-b94f-41aa-9547-59e5a0eff174-dns-svc\") pod \"dnsmasq-dns-77795d58f5-px8hm\" (UID: \"13f951e9-b94f-41aa-9547-59e5a0eff174\") " pod="openstack/dnsmasq-dns-77795d58f5-px8hm" Oct 01 15:18:17 crc kubenswrapper[4869]: I1001 15:18:17.531342 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8lv5n\" (UniqueName: \"kubernetes.io/projected/13f951e9-b94f-41aa-9547-59e5a0eff174-kube-api-access-8lv5n\") pod \"dnsmasq-dns-77795d58f5-px8hm\" (UID: \"13f951e9-b94f-41aa-9547-59e5a0eff174\") " pod="openstack/dnsmasq-dns-77795d58f5-px8hm" Oct 01 15:18:17 crc kubenswrapper[4869]: I1001 15:18:17.632318 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/13f951e9-b94f-41aa-9547-59e5a0eff174-config\") pod \"dnsmasq-dns-77795d58f5-px8hm\" (UID: \"13f951e9-b94f-41aa-9547-59e5a0eff174\") " pod="openstack/dnsmasq-dns-77795d58f5-px8hm" Oct 01 15:18:17 crc kubenswrapper[4869]: I1001 15:18:17.632398 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/13f951e9-b94f-41aa-9547-59e5a0eff174-dns-svc\") pod \"dnsmasq-dns-77795d58f5-px8hm\" (UID: \"13f951e9-b94f-41aa-9547-59e5a0eff174\") " pod="openstack/dnsmasq-dns-77795d58f5-px8hm" Oct 01 15:18:17 crc kubenswrapper[4869]: I1001 15:18:17.632428 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8lv5n\" (UniqueName: \"kubernetes.io/projected/13f951e9-b94f-41aa-9547-59e5a0eff174-kube-api-access-8lv5n\") pod \"dnsmasq-dns-77795d58f5-px8hm\" (UID: \"13f951e9-b94f-41aa-9547-59e5a0eff174\") " pod="openstack/dnsmasq-dns-77795d58f5-px8hm" Oct 01 15:18:17 crc kubenswrapper[4869]: I1001 15:18:17.635040 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/13f951e9-b94f-41aa-9547-59e5a0eff174-config\") pod \"dnsmasq-dns-77795d58f5-px8hm\" (UID: \"13f951e9-b94f-41aa-9547-59e5a0eff174\") " pod="openstack/dnsmasq-dns-77795d58f5-px8hm" Oct 01 15:18:17 crc kubenswrapper[4869]: I1001 15:18:17.635553 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/13f951e9-b94f-41aa-9547-59e5a0eff174-dns-svc\") pod \"dnsmasq-dns-77795d58f5-px8hm\" (UID: \"13f951e9-b94f-41aa-9547-59e5a0eff174\") " pod="openstack/dnsmasq-dns-77795d58f5-px8hm" Oct 01 15:18:17 crc kubenswrapper[4869]: I1001 15:18:17.661641 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8lv5n\" (UniqueName: \"kubernetes.io/projected/13f951e9-b94f-41aa-9547-59e5a0eff174-kube-api-access-8lv5n\") pod \"dnsmasq-dns-77795d58f5-px8hm\" (UID: \"13f951e9-b94f-41aa-9547-59e5a0eff174\") " pod="openstack/dnsmasq-dns-77795d58f5-px8hm" 
Oct 01 15:18:17 crc kubenswrapper[4869]: I1001 15:18:17.718229 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-77795d58f5-px8hm" Oct 01 15:18:18 crc kubenswrapper[4869]: I1001 15:18:18.067823 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-b6f94bdfc-k4rnx"] Oct 01 15:18:18 crc kubenswrapper[4869]: W1001 15:18:18.075165 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod99606142_049c_4f0a_813f_b7274041ec9e.slice/crio-604b836dade644e0d905b316842453a925f045a7e823ea9d4aaafec617a49a58 WatchSource:0}: Error finding container 604b836dade644e0d905b316842453a925f045a7e823ea9d4aaafec617a49a58: Status 404 returned error can't find the container with id 604b836dade644e0d905b316842453a925f045a7e823ea9d4aaafec617a49a58 Oct 01 15:18:18 crc kubenswrapper[4869]: I1001 15:18:18.165131 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-77795d58f5-px8hm"] Oct 01 15:18:18 crc kubenswrapper[4869]: W1001 15:18:18.178854 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod13f951e9_b94f_41aa_9547_59e5a0eff174.slice/crio-fb318105a718901c7fbd06083f97e4c3caf7821003df838e9fe13b3c2829ccae WatchSource:0}: Error finding container fb318105a718901c7fbd06083f97e4c3caf7821003df838e9fe13b3c2829ccae: Status 404 returned error can't find the container with id fb318105a718901c7fbd06083f97e4c3caf7821003df838e9fe13b3c2829ccae Oct 01 15:18:18 crc kubenswrapper[4869]: I1001 15:18:18.286742 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Oct 01 15:18:18 crc kubenswrapper[4869]: I1001 15:18:18.287968 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 01 15:18:18 crc kubenswrapper[4869]: I1001 15:18:18.291478 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Oct 01 15:18:18 crc kubenswrapper[4869]: I1001 15:18:18.291785 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Oct 01 15:18:18 crc kubenswrapper[4869]: I1001 15:18:18.291948 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Oct 01 15:18:18 crc kubenswrapper[4869]: I1001 15:18:18.292286 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Oct 01 15:18:18 crc kubenswrapper[4869]: I1001 15:18:18.292433 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Oct 01 15:18:18 crc kubenswrapper[4869]: I1001 15:18:18.292704 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-4lr4p" Oct 01 15:18:18 crc kubenswrapper[4869]: I1001 15:18:18.292946 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Oct 01 15:18:18 crc kubenswrapper[4869]: I1001 15:18:18.303598 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 01 15:18:18 crc kubenswrapper[4869]: I1001 15:18:18.359771 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77795d58f5-px8hm" event={"ID":"13f951e9-b94f-41aa-9547-59e5a0eff174","Type":"ContainerStarted","Data":"fb318105a718901c7fbd06083f97e4c3caf7821003df838e9fe13b3c2829ccae"} Oct 01 15:18:18 crc kubenswrapper[4869]: I1001 15:18:18.364351 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b6f94bdfc-k4rnx" event={"ID":"99606142-049c-4f0a-813f-b7274041ec9e","Type":"ContainerStarted","Data":"604b836dade644e0d905b316842453a925f045a7e823ea9d4aaafec617a49a58"} Oct 01 15:18:18 crc kubenswrapper[4869]: I1001 15:18:18.364537 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-86gjq" podUID="152f2134-8a6e-4400-a2b5-978fe7d9f23f" containerName="registry-server" containerID="cri-o://1cc1902e10650a448496aced3377102121f56ecf7073ef579b067e946451ca25" gracePeriod=2 Oct 01 15:18:18 crc kubenswrapper[4869]: I1001 15:18:18.446709 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/25c1e81e-fa0e-4ec6-b29c-bda2529fde66-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"25c1e81e-fa0e-4ec6-b29c-bda2529fde66\") " pod="openstack/rabbitmq-server-0" Oct 01 15:18:18 crc kubenswrapper[4869]: I1001 15:18:18.446782 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: \"25c1e81e-fa0e-4ec6-b29c-bda2529fde66\") " pod="openstack/rabbitmq-server-0" Oct 01 15:18:18 crc kubenswrapper[4869]: I1001 15:18:18.447111 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/25c1e81e-fa0e-4ec6-b29c-bda2529fde66-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"25c1e81e-fa0e-4ec6-b29c-bda2529fde66\") " pod="openstack/rabbitmq-server-0" Oct 01 15:18:18 crc kubenswrapper[4869]: 
I1001 15:18:18.447971 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/25c1e81e-fa0e-4ec6-b29c-bda2529fde66-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"25c1e81e-fa0e-4ec6-b29c-bda2529fde66\") " pod="openstack/rabbitmq-server-0" Oct 01 15:18:18 crc kubenswrapper[4869]: I1001 15:18:18.448040 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/25c1e81e-fa0e-4ec6-b29c-bda2529fde66-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"25c1e81e-fa0e-4ec6-b29c-bda2529fde66\") " pod="openstack/rabbitmq-server-0" Oct 01 15:18:18 crc kubenswrapper[4869]: I1001 15:18:18.448126 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/25c1e81e-fa0e-4ec6-b29c-bda2529fde66-config-data\") pod \"rabbitmq-server-0\" (UID: \"25c1e81e-fa0e-4ec6-b29c-bda2529fde66\") " pod="openstack/rabbitmq-server-0" Oct 01 15:18:18 crc kubenswrapper[4869]: I1001 15:18:18.448206 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-59bth\" (UniqueName: \"kubernetes.io/projected/25c1e81e-fa0e-4ec6-b29c-bda2529fde66-kube-api-access-59bth\") pod \"rabbitmq-server-0\" (UID: \"25c1e81e-fa0e-4ec6-b29c-bda2529fde66\") " pod="openstack/rabbitmq-server-0" Oct 01 15:18:18 crc kubenswrapper[4869]: I1001 15:18:18.448245 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/25c1e81e-fa0e-4ec6-b29c-bda2529fde66-pod-info\") pod \"rabbitmq-server-0\" (UID: \"25c1e81e-fa0e-4ec6-b29c-bda2529fde66\") " pod="openstack/rabbitmq-server-0" Oct 01 15:18:18 crc kubenswrapper[4869]: I1001 15:18:18.448434 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/25c1e81e-fa0e-4ec6-b29c-bda2529fde66-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"25c1e81e-fa0e-4ec6-b29c-bda2529fde66\") " pod="openstack/rabbitmq-server-0" Oct 01 15:18:18 crc kubenswrapper[4869]: I1001 15:18:18.448467 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/25c1e81e-fa0e-4ec6-b29c-bda2529fde66-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"25c1e81e-fa0e-4ec6-b29c-bda2529fde66\") " pod="openstack/rabbitmq-server-0" Oct 01 15:18:18 crc kubenswrapper[4869]: I1001 15:18:18.448507 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/25c1e81e-fa0e-4ec6-b29c-bda2529fde66-server-conf\") pod \"rabbitmq-server-0\" (UID: \"25c1e81e-fa0e-4ec6-b29c-bda2529fde66\") " pod="openstack/rabbitmq-server-0" Oct 01 15:18:18 crc kubenswrapper[4869]: I1001 15:18:18.517135 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 01 15:18:18 crc kubenswrapper[4869]: I1001 15:18:18.518545 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Oct 01 15:18:18 crc kubenswrapper[4869]: I1001 15:18:18.521318 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Oct 01 15:18:18 crc kubenswrapper[4869]: I1001 15:18:18.521494 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-djs62" Oct 01 15:18:18 crc kubenswrapper[4869]: I1001 15:18:18.521609 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Oct 01 15:18:18 crc kubenswrapper[4869]: I1001 15:18:18.521794 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Oct 01 15:18:18 crc kubenswrapper[4869]: I1001 15:18:18.521908 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Oct 01 15:18:18 crc kubenswrapper[4869]: I1001 15:18:18.522002 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Oct 01 15:18:18 crc kubenswrapper[4869]: I1001 15:18:18.522134 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Oct 01 15:18:18 crc kubenswrapper[4869]: I1001 15:18:18.525631 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 01 15:18:18 crc kubenswrapper[4869]: I1001 15:18:18.549944 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/25c1e81e-fa0e-4ec6-b29c-bda2529fde66-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"25c1e81e-fa0e-4ec6-b29c-bda2529fde66\") " pod="openstack/rabbitmq-server-0" Oct 01 15:18:18 crc kubenswrapper[4869]: I1001 15:18:18.549978 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/25c1e81e-fa0e-4ec6-b29c-bda2529fde66-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"25c1e81e-fa0e-4ec6-b29c-bda2529fde66\") " pod="openstack/rabbitmq-server-0" Oct 01 15:18:18 crc kubenswrapper[4869]: I1001 15:18:18.549994 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/25c1e81e-fa0e-4ec6-b29c-bda2529fde66-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"25c1e81e-fa0e-4ec6-b29c-bda2529fde66\") " pod="openstack/rabbitmq-server-0" Oct 01 15:18:18 crc kubenswrapper[4869]: I1001 15:18:18.550025 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/25c1e81e-fa0e-4ec6-b29c-bda2529fde66-config-data\") pod \"rabbitmq-server-0\" (UID: \"25c1e81e-fa0e-4ec6-b29c-bda2529fde66\") " pod="openstack/rabbitmq-server-0" Oct 01 15:18:18 crc kubenswrapper[4869]: I1001 15:18:18.550058 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-59bth\" (UniqueName: \"kubernetes.io/projected/25c1e81e-fa0e-4ec6-b29c-bda2529fde66-kube-api-access-59bth\") pod \"rabbitmq-server-0\" (UID: \"25c1e81e-fa0e-4ec6-b29c-bda2529fde66\") " pod="openstack/rabbitmq-server-0" Oct 01 15:18:18 crc kubenswrapper[4869]: I1001 15:18:18.550081 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/25c1e81e-fa0e-4ec6-b29c-bda2529fde66-pod-info\") pod 
\"rabbitmq-server-0\" (UID: \"25c1e81e-fa0e-4ec6-b29c-bda2529fde66\") " pod="openstack/rabbitmq-server-0" Oct 01 15:18:18 crc kubenswrapper[4869]: I1001 15:18:18.550110 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/25c1e81e-fa0e-4ec6-b29c-bda2529fde66-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"25c1e81e-fa0e-4ec6-b29c-bda2529fde66\") " pod="openstack/rabbitmq-server-0" Oct 01 15:18:18 crc kubenswrapper[4869]: I1001 15:18:18.550128 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/25c1e81e-fa0e-4ec6-b29c-bda2529fde66-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"25c1e81e-fa0e-4ec6-b29c-bda2529fde66\") " pod="openstack/rabbitmq-server-0" Oct 01 15:18:18 crc kubenswrapper[4869]: I1001 15:18:18.550147 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/25c1e81e-fa0e-4ec6-b29c-bda2529fde66-server-conf\") pod \"rabbitmq-server-0\" (UID: \"25c1e81e-fa0e-4ec6-b29c-bda2529fde66\") " pod="openstack/rabbitmq-server-0" Oct 01 15:18:18 crc kubenswrapper[4869]: I1001 15:18:18.550162 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/25c1e81e-fa0e-4ec6-b29c-bda2529fde66-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"25c1e81e-fa0e-4ec6-b29c-bda2529fde66\") " pod="openstack/rabbitmq-server-0" Oct 01 15:18:18 crc kubenswrapper[4869]: I1001 15:18:18.550193 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: \"25c1e81e-fa0e-4ec6-b29c-bda2529fde66\") " pod="openstack/rabbitmq-server-0" Oct 01 15:18:18 crc kubenswrapper[4869]: I1001 15:18:18.550543 4869 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: \"25c1e81e-fa0e-4ec6-b29c-bda2529fde66\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/rabbitmq-server-0" Oct 01 15:18:18 crc kubenswrapper[4869]: I1001 15:18:18.551842 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/25c1e81e-fa0e-4ec6-b29c-bda2529fde66-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"25c1e81e-fa0e-4ec6-b29c-bda2529fde66\") " pod="openstack/rabbitmq-server-0" Oct 01 15:18:18 crc kubenswrapper[4869]: I1001 15:18:18.552957 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/25c1e81e-fa0e-4ec6-b29c-bda2529fde66-server-conf\") pod \"rabbitmq-server-0\" (UID: \"25c1e81e-fa0e-4ec6-b29c-bda2529fde66\") " pod="openstack/rabbitmq-server-0" Oct 01 15:18:18 crc kubenswrapper[4869]: I1001 15:18:18.553136 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/25c1e81e-fa0e-4ec6-b29c-bda2529fde66-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"25c1e81e-fa0e-4ec6-b29c-bda2529fde66\") " pod="openstack/rabbitmq-server-0" Oct 01 15:18:18 crc kubenswrapper[4869]: I1001 15:18:18.553171 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: 
\"kubernetes.io/empty-dir/25c1e81e-fa0e-4ec6-b29c-bda2529fde66-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"25c1e81e-fa0e-4ec6-b29c-bda2529fde66\") " pod="openstack/rabbitmq-server-0" Oct 01 15:18:18 crc kubenswrapper[4869]: I1001 15:18:18.553331 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/25c1e81e-fa0e-4ec6-b29c-bda2529fde66-config-data\") pod \"rabbitmq-server-0\" (UID: \"25c1e81e-fa0e-4ec6-b29c-bda2529fde66\") " pod="openstack/rabbitmq-server-0" Oct 01 15:18:18 crc kubenswrapper[4869]: I1001 15:18:18.558656 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/25c1e81e-fa0e-4ec6-b29c-bda2529fde66-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"25c1e81e-fa0e-4ec6-b29c-bda2529fde66\") " pod="openstack/rabbitmq-server-0" Oct 01 15:18:18 crc kubenswrapper[4869]: I1001 15:18:18.559373 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/25c1e81e-fa0e-4ec6-b29c-bda2529fde66-pod-info\") pod \"rabbitmq-server-0\" (UID: \"25c1e81e-fa0e-4ec6-b29c-bda2529fde66\") " pod="openstack/rabbitmq-server-0" Oct 01 15:18:18 crc kubenswrapper[4869]: I1001 15:18:18.560629 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/25c1e81e-fa0e-4ec6-b29c-bda2529fde66-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"25c1e81e-fa0e-4ec6-b29c-bda2529fde66\") " pod="openstack/rabbitmq-server-0" Oct 01 15:18:18 crc kubenswrapper[4869]: I1001 15:18:18.562045 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/25c1e81e-fa0e-4ec6-b29c-bda2529fde66-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"25c1e81e-fa0e-4ec6-b29c-bda2529fde66\") " pod="openstack/rabbitmq-server-0" Oct 01 15:18:18 crc kubenswrapper[4869]: I1001 15:18:18.565265 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-59bth\" (UniqueName: \"kubernetes.io/projected/25c1e81e-fa0e-4ec6-b29c-bda2529fde66-kube-api-access-59bth\") pod \"rabbitmq-server-0\" (UID: \"25c1e81e-fa0e-4ec6-b29c-bda2529fde66\") " pod="openstack/rabbitmq-server-0" Oct 01 15:18:18 crc kubenswrapper[4869]: I1001 15:18:18.576543 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: \"25c1e81e-fa0e-4ec6-b29c-bda2529fde66\") " pod="openstack/rabbitmq-server-0" Oct 01 15:18:18 crc kubenswrapper[4869]: I1001 15:18:18.609926 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 01 15:18:18 crc kubenswrapper[4869]: I1001 15:18:18.651535 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/004ab312-4718-4cf2-80df-5a2b1eccc301-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"004ab312-4718-4cf2-80df-5a2b1eccc301\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 15:18:18 crc kubenswrapper[4869]: I1001 15:18:18.651592 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/004ab312-4718-4cf2-80df-5a2b1eccc301-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"004ab312-4718-4cf2-80df-5a2b1eccc301\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 15:18:18 crc kubenswrapper[4869]: I1001 15:18:18.651621 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/004ab312-4718-4cf2-80df-5a2b1eccc301-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"004ab312-4718-4cf2-80df-5a2b1eccc301\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 15:18:18 crc kubenswrapper[4869]: I1001 15:18:18.651640 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/004ab312-4718-4cf2-80df-5a2b1eccc301-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"004ab312-4718-4cf2-80df-5a2b1eccc301\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 15:18:18 crc kubenswrapper[4869]: I1001 15:18:18.651659 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/004ab312-4718-4cf2-80df-5a2b1eccc301-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"004ab312-4718-4cf2-80df-5a2b1eccc301\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 15:18:18 crc kubenswrapper[4869]: I1001 15:18:18.651685 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/004ab312-4718-4cf2-80df-5a2b1eccc301-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"004ab312-4718-4cf2-80df-5a2b1eccc301\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 15:18:18 crc kubenswrapper[4869]: I1001 15:18:18.651714 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/004ab312-4718-4cf2-80df-5a2b1eccc301-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"004ab312-4718-4cf2-80df-5a2b1eccc301\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 15:18:18 crc kubenswrapper[4869]: I1001 15:18:18.651772 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/004ab312-4718-4cf2-80df-5a2b1eccc301-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"004ab312-4718-4cf2-80df-5a2b1eccc301\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 15:18:18 crc kubenswrapper[4869]: I1001 15:18:18.651790 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/004ab312-4718-4cf2-80df-5a2b1eccc301-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" 
(UID: \"004ab312-4718-4cf2-80df-5a2b1eccc301\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 15:18:18 crc kubenswrapper[4869]: I1001 15:18:18.651810 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"004ab312-4718-4cf2-80df-5a2b1eccc301\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 15:18:18 crc kubenswrapper[4869]: I1001 15:18:18.651880 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2strd\" (UniqueName: \"kubernetes.io/projected/004ab312-4718-4cf2-80df-5a2b1eccc301-kube-api-access-2strd\") pod \"rabbitmq-cell1-server-0\" (UID: \"004ab312-4718-4cf2-80df-5a2b1eccc301\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 15:18:18 crc kubenswrapper[4869]: I1001 15:18:18.753585 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/004ab312-4718-4cf2-80df-5a2b1eccc301-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"004ab312-4718-4cf2-80df-5a2b1eccc301\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 15:18:18 crc kubenswrapper[4869]: I1001 15:18:18.754247 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/004ab312-4718-4cf2-80df-5a2b1eccc301-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"004ab312-4718-4cf2-80df-5a2b1eccc301\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 15:18:18 crc kubenswrapper[4869]: I1001 15:18:18.754298 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/004ab312-4718-4cf2-80df-5a2b1eccc301-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"004ab312-4718-4cf2-80df-5a2b1eccc301\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 15:18:18 crc kubenswrapper[4869]: I1001 15:18:18.754318 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"004ab312-4718-4cf2-80df-5a2b1eccc301\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 15:18:18 crc kubenswrapper[4869]: I1001 15:18:18.754540 4869 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"004ab312-4718-4cf2-80df-5a2b1eccc301\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/rabbitmq-cell1-server-0" Oct 01 15:18:18 crc kubenswrapper[4869]: I1001 15:18:18.754918 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2strd\" (UniqueName: \"kubernetes.io/projected/004ab312-4718-4cf2-80df-5a2b1eccc301-kube-api-access-2strd\") pod \"rabbitmq-cell1-server-0\" (UID: \"004ab312-4718-4cf2-80df-5a2b1eccc301\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 15:18:18 crc kubenswrapper[4869]: I1001 15:18:18.754979 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/004ab312-4718-4cf2-80df-5a2b1eccc301-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"004ab312-4718-4cf2-80df-5a2b1eccc301\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 15:18:18 crc 
kubenswrapper[4869]: I1001 15:18:18.755000 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/004ab312-4718-4cf2-80df-5a2b1eccc301-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"004ab312-4718-4cf2-80df-5a2b1eccc301\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 15:18:18 crc kubenswrapper[4869]: I1001 15:18:18.755030 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/004ab312-4718-4cf2-80df-5a2b1eccc301-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"004ab312-4718-4cf2-80df-5a2b1eccc301\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 15:18:18 crc kubenswrapper[4869]: I1001 15:18:18.755060 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/004ab312-4718-4cf2-80df-5a2b1eccc301-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"004ab312-4718-4cf2-80df-5a2b1eccc301\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 15:18:18 crc kubenswrapper[4869]: I1001 15:18:18.755078 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/004ab312-4718-4cf2-80df-5a2b1eccc301-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"004ab312-4718-4cf2-80df-5a2b1eccc301\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 15:18:18 crc kubenswrapper[4869]: I1001 15:18:18.755132 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/004ab312-4718-4cf2-80df-5a2b1eccc301-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"004ab312-4718-4cf2-80df-5a2b1eccc301\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 15:18:18 crc kubenswrapper[4869]: I1001 15:18:18.755988 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/004ab312-4718-4cf2-80df-5a2b1eccc301-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"004ab312-4718-4cf2-80df-5a2b1eccc301\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 15:18:18 crc kubenswrapper[4869]: I1001 15:18:18.756331 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/004ab312-4718-4cf2-80df-5a2b1eccc301-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"004ab312-4718-4cf2-80df-5a2b1eccc301\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 15:18:18 crc kubenswrapper[4869]: I1001 15:18:18.756771 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/004ab312-4718-4cf2-80df-5a2b1eccc301-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"004ab312-4718-4cf2-80df-5a2b1eccc301\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 15:18:18 crc kubenswrapper[4869]: I1001 15:18:18.756837 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/004ab312-4718-4cf2-80df-5a2b1eccc301-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"004ab312-4718-4cf2-80df-5a2b1eccc301\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 15:18:18 crc kubenswrapper[4869]: I1001 15:18:18.757060 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: 
\"kubernetes.io/empty-dir/004ab312-4718-4cf2-80df-5a2b1eccc301-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"004ab312-4718-4cf2-80df-5a2b1eccc301\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 15:18:18 crc kubenswrapper[4869]: I1001 15:18:18.758993 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/004ab312-4718-4cf2-80df-5a2b1eccc301-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"004ab312-4718-4cf2-80df-5a2b1eccc301\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 15:18:18 crc kubenswrapper[4869]: I1001 15:18:18.759381 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/004ab312-4718-4cf2-80df-5a2b1eccc301-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"004ab312-4718-4cf2-80df-5a2b1eccc301\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 15:18:18 crc kubenswrapper[4869]: I1001 15:18:18.761381 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/004ab312-4718-4cf2-80df-5a2b1eccc301-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"004ab312-4718-4cf2-80df-5a2b1eccc301\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 15:18:18 crc kubenswrapper[4869]: I1001 15:18:18.770810 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2strd\" (UniqueName: \"kubernetes.io/projected/004ab312-4718-4cf2-80df-5a2b1eccc301-kube-api-access-2strd\") pod \"rabbitmq-cell1-server-0\" (UID: \"004ab312-4718-4cf2-80df-5a2b1eccc301\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 15:18:18 crc kubenswrapper[4869]: I1001 15:18:18.773743 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"004ab312-4718-4cf2-80df-5a2b1eccc301\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 15:18:18 crc kubenswrapper[4869]: I1001 15:18:18.774824 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/004ab312-4718-4cf2-80df-5a2b1eccc301-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"004ab312-4718-4cf2-80df-5a2b1eccc301\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 15:18:18 crc kubenswrapper[4869]: I1001 15:18:18.913333 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Oct 01 15:18:19 crc kubenswrapper[4869]: I1001 15:18:19.974963 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 01 15:18:19 crc kubenswrapper[4869]: W1001 15:18:19.984609 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod25c1e81e_fa0e_4ec6_b29c_bda2529fde66.slice/crio-212451a3d82ba1930abb9091bb7bf2db7427ad3b63a4620558bd76313d9513dc WatchSource:0}: Error finding container 212451a3d82ba1930abb9091bb7bf2db7427ad3b63a4620558bd76313d9513dc: Status 404 returned error can't find the container with id 212451a3d82ba1930abb9091bb7bf2db7427ad3b63a4620558bd76313d9513dc Oct 01 15:18:20 crc kubenswrapper[4869]: I1001 15:18:20.019171 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 01 15:18:20 crc kubenswrapper[4869]: W1001 15:18:20.020911 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod004ab312_4718_4cf2_80df_5a2b1eccc301.slice/crio-2ca351ebd7a6d92609dd02a379587ee6f1bba90d0bbb20f656a7cbfc49060f8d WatchSource:0}: Error finding container 2ca351ebd7a6d92609dd02a379587ee6f1bba90d0bbb20f656a7cbfc49060f8d: Status 404 returned error can't find the container with id 2ca351ebd7a6d92609dd02a379587ee6f1bba90d0bbb20f656a7cbfc49060f8d Oct 01 15:18:20 crc kubenswrapper[4869]: I1001 15:18:20.347708 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-86gjq" Oct 01 15:18:20 crc kubenswrapper[4869]: I1001 15:18:20.397966 4869 generic.go:334] "Generic (PLEG): container finished" podID="152f2134-8a6e-4400-a2b5-978fe7d9f23f" containerID="1cc1902e10650a448496aced3377102121f56ecf7073ef579b067e946451ca25" exitCode=0 Oct 01 15:18:20 crc kubenswrapper[4869]: I1001 15:18:20.398066 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-86gjq" Oct 01 15:18:20 crc kubenswrapper[4869]: I1001 15:18:20.398059 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-86gjq" event={"ID":"152f2134-8a6e-4400-a2b5-978fe7d9f23f","Type":"ContainerDied","Data":"1cc1902e10650a448496aced3377102121f56ecf7073ef579b067e946451ca25"} Oct 01 15:18:20 crc kubenswrapper[4869]: I1001 15:18:20.398759 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-86gjq" event={"ID":"152f2134-8a6e-4400-a2b5-978fe7d9f23f","Type":"ContainerDied","Data":"6cea9cea2f7fa44af50f1eaf58c6d9a07eb7f533a8b2c65ed89e73a14c1eeacb"} Oct 01 15:18:20 crc kubenswrapper[4869]: I1001 15:18:20.398793 4869 scope.go:117] "RemoveContainer" containerID="1cc1902e10650a448496aced3377102121f56ecf7073ef579b067e946451ca25" Oct 01 15:18:20 crc kubenswrapper[4869]: I1001 15:18:20.407680 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"004ab312-4718-4cf2-80df-5a2b1eccc301","Type":"ContainerStarted","Data":"2ca351ebd7a6d92609dd02a379587ee6f1bba90d0bbb20f656a7cbfc49060f8d"} Oct 01 15:18:20 crc kubenswrapper[4869]: I1001 15:18:20.415507 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"25c1e81e-fa0e-4ec6-b29c-bda2529fde66","Type":"ContainerStarted","Data":"212451a3d82ba1930abb9091bb7bf2db7427ad3b63a4620558bd76313d9513dc"} Oct 01 15:18:20 crc kubenswrapper[4869]: I1001 15:18:20.431687 4869 scope.go:117] "RemoveContainer" containerID="944696ea634e12b6d7306604867e86d3499319908ef87d4752188f7c73fbb81e" Oct 01 15:18:20 crc kubenswrapper[4869]: I1001 15:18:20.449423 4869 scope.go:117] "RemoveContainer" containerID="672e434343a8faff365258ee6182d46dc58de53550c85c4d75884eda9034bf0d" Oct 01 15:18:20 crc kubenswrapper[4869]: I1001 15:18:20.463086 4869 scope.go:117] "RemoveContainer" containerID="1cc1902e10650a448496aced3377102121f56ecf7073ef579b067e946451ca25" Oct 01 15:18:20 crc kubenswrapper[4869]: E1001 15:18:20.463410 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1cc1902e10650a448496aced3377102121f56ecf7073ef579b067e946451ca25\": container with ID starting with 1cc1902e10650a448496aced3377102121f56ecf7073ef579b067e946451ca25 not found: ID does not exist" containerID="1cc1902e10650a448496aced3377102121f56ecf7073ef579b067e946451ca25" Oct 01 15:18:20 crc kubenswrapper[4869]: I1001 15:18:20.463448 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1cc1902e10650a448496aced3377102121f56ecf7073ef579b067e946451ca25"} err="failed to get container status \"1cc1902e10650a448496aced3377102121f56ecf7073ef579b067e946451ca25\": rpc error: code = NotFound desc = could not find container \"1cc1902e10650a448496aced3377102121f56ecf7073ef579b067e946451ca25\": container with ID starting with 1cc1902e10650a448496aced3377102121f56ecf7073ef579b067e946451ca25 not found: ID does not exist" Oct 01 15:18:20 crc kubenswrapper[4869]: I1001 15:18:20.463473 4869 scope.go:117] "RemoveContainer" containerID="944696ea634e12b6d7306604867e86d3499319908ef87d4752188f7c73fbb81e" Oct 01 15:18:20 crc kubenswrapper[4869]: E1001 15:18:20.468336 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"944696ea634e12b6d7306604867e86d3499319908ef87d4752188f7c73fbb81e\": container with 
ID starting with 944696ea634e12b6d7306604867e86d3499319908ef87d4752188f7c73fbb81e not found: ID does not exist" containerID="944696ea634e12b6d7306604867e86d3499319908ef87d4752188f7c73fbb81e" Oct 01 15:18:20 crc kubenswrapper[4869]: I1001 15:18:20.468389 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"944696ea634e12b6d7306604867e86d3499319908ef87d4752188f7c73fbb81e"} err="failed to get container status \"944696ea634e12b6d7306604867e86d3499319908ef87d4752188f7c73fbb81e\": rpc error: code = NotFound desc = could not find container \"944696ea634e12b6d7306604867e86d3499319908ef87d4752188f7c73fbb81e\": container with ID starting with 944696ea634e12b6d7306604867e86d3499319908ef87d4752188f7c73fbb81e not found: ID does not exist" Oct 01 15:18:20 crc kubenswrapper[4869]: I1001 15:18:20.468416 4869 scope.go:117] "RemoveContainer" containerID="672e434343a8faff365258ee6182d46dc58de53550c85c4d75884eda9034bf0d" Oct 01 15:18:20 crc kubenswrapper[4869]: E1001 15:18:20.468872 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"672e434343a8faff365258ee6182d46dc58de53550c85c4d75884eda9034bf0d\": container with ID starting with 672e434343a8faff365258ee6182d46dc58de53550c85c4d75884eda9034bf0d not found: ID does not exist" containerID="672e434343a8faff365258ee6182d46dc58de53550c85c4d75884eda9034bf0d" Oct 01 15:18:20 crc kubenswrapper[4869]: I1001 15:18:20.468900 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"672e434343a8faff365258ee6182d46dc58de53550c85c4d75884eda9034bf0d"} err="failed to get container status \"672e434343a8faff365258ee6182d46dc58de53550c85c4d75884eda9034bf0d\": rpc error: code = NotFound desc = could not find container \"672e434343a8faff365258ee6182d46dc58de53550c85c4d75884eda9034bf0d\": container with ID starting with 672e434343a8faff365258ee6182d46dc58de53550c85c4d75884eda9034bf0d not found: ID does not exist" Oct 01 15:18:20 crc kubenswrapper[4869]: I1001 15:18:20.488722 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/152f2134-8a6e-4400-a2b5-978fe7d9f23f-utilities\") pod \"152f2134-8a6e-4400-a2b5-978fe7d9f23f\" (UID: \"152f2134-8a6e-4400-a2b5-978fe7d9f23f\") " Oct 01 15:18:20 crc kubenswrapper[4869]: I1001 15:18:20.488810 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4mgnz\" (UniqueName: \"kubernetes.io/projected/152f2134-8a6e-4400-a2b5-978fe7d9f23f-kube-api-access-4mgnz\") pod \"152f2134-8a6e-4400-a2b5-978fe7d9f23f\" (UID: \"152f2134-8a6e-4400-a2b5-978fe7d9f23f\") " Oct 01 15:18:20 crc kubenswrapper[4869]: I1001 15:18:20.488854 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/152f2134-8a6e-4400-a2b5-978fe7d9f23f-catalog-content\") pod \"152f2134-8a6e-4400-a2b5-978fe7d9f23f\" (UID: \"152f2134-8a6e-4400-a2b5-978fe7d9f23f\") " Oct 01 15:18:20 crc kubenswrapper[4869]: I1001 15:18:20.490962 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/152f2134-8a6e-4400-a2b5-978fe7d9f23f-utilities" (OuterVolumeSpecName: "utilities") pod "152f2134-8a6e-4400-a2b5-978fe7d9f23f" (UID: "152f2134-8a6e-4400-a2b5-978fe7d9f23f"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 15:18:20 crc kubenswrapper[4869]: I1001 15:18:20.495522 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/152f2134-8a6e-4400-a2b5-978fe7d9f23f-kube-api-access-4mgnz" (OuterVolumeSpecName: "kube-api-access-4mgnz") pod "152f2134-8a6e-4400-a2b5-978fe7d9f23f" (UID: "152f2134-8a6e-4400-a2b5-978fe7d9f23f"). InnerVolumeSpecName "kube-api-access-4mgnz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:18:20 crc kubenswrapper[4869]: I1001 15:18:20.590559 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4mgnz\" (UniqueName: \"kubernetes.io/projected/152f2134-8a6e-4400-a2b5-978fe7d9f23f-kube-api-access-4mgnz\") on node \"crc\" DevicePath \"\"" Oct 01 15:18:20 crc kubenswrapper[4869]: I1001 15:18:20.590586 4869 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/152f2134-8a6e-4400-a2b5-978fe7d9f23f-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 15:18:20 crc kubenswrapper[4869]: I1001 15:18:20.602309 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/152f2134-8a6e-4400-a2b5-978fe7d9f23f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "152f2134-8a6e-4400-a2b5-978fe7d9f23f" (UID: "152f2134-8a6e-4400-a2b5-978fe7d9f23f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 15:18:20 crc kubenswrapper[4869]: I1001 15:18:20.705590 4869 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/152f2134-8a6e-4400-a2b5-978fe7d9f23f-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 15:18:20 crc kubenswrapper[4869]: I1001 15:18:20.739523 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-86gjq"] Oct 01 15:18:20 crc kubenswrapper[4869]: I1001 15:18:20.743552 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-86gjq"] Oct 01 15:18:21 crc kubenswrapper[4869]: I1001 15:18:21.354651 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"] Oct 01 15:18:21 crc kubenswrapper[4869]: E1001 15:18:21.359481 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="152f2134-8a6e-4400-a2b5-978fe7d9f23f" containerName="extract-content" Oct 01 15:18:21 crc kubenswrapper[4869]: I1001 15:18:21.359731 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="152f2134-8a6e-4400-a2b5-978fe7d9f23f" containerName="extract-content" Oct 01 15:18:21 crc kubenswrapper[4869]: E1001 15:18:21.359921 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="152f2134-8a6e-4400-a2b5-978fe7d9f23f" containerName="registry-server" Oct 01 15:18:21 crc kubenswrapper[4869]: I1001 15:18:21.360088 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="152f2134-8a6e-4400-a2b5-978fe7d9f23f" containerName="registry-server" Oct 01 15:18:21 crc kubenswrapper[4869]: E1001 15:18:21.360338 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="152f2134-8a6e-4400-a2b5-978fe7d9f23f" containerName="extract-utilities" Oct 01 15:18:21 crc kubenswrapper[4869]: I1001 15:18:21.360389 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="152f2134-8a6e-4400-a2b5-978fe7d9f23f" containerName="extract-utilities" Oct 01 15:18:21 crc kubenswrapper[4869]: I1001 15:18:21.360963 4869 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="152f2134-8a6e-4400-a2b5-978fe7d9f23f" containerName="registry-server" Oct 01 15:18:21 crc kubenswrapper[4869]: I1001 15:18:21.362467 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Oct 01 15:18:21 crc kubenswrapper[4869]: I1001 15:18:21.362591 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Oct 01 15:18:21 crc kubenswrapper[4869]: I1001 15:18:21.366180 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-w8fc7" Oct 01 15:18:21 crc kubenswrapper[4869]: I1001 15:18:21.371248 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Oct 01 15:18:21 crc kubenswrapper[4869]: I1001 15:18:21.371542 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc" Oct 01 15:18:21 crc kubenswrapper[4869]: I1001 15:18:21.371702 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data" Oct 01 15:18:21 crc kubenswrapper[4869]: I1001 15:18:21.372027 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts" Oct 01 15:18:21 crc kubenswrapper[4869]: I1001 15:18:21.379478 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle" Oct 01 15:18:21 crc kubenswrapper[4869]: I1001 15:18:21.413893 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"] Oct 01 15:18:21 crc kubenswrapper[4869]: I1001 15:18:21.415391 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Oct 01 15:18:21 crc kubenswrapper[4869]: I1001 15:18:21.416474 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7980b3ae-7c3f-42ad-a9e4-9bfb05cb934a-operator-scripts\") pod \"openstack-galera-0\" (UID: \"7980b3ae-7c3f-42ad-a9e4-9bfb05cb934a\") " pod="openstack/openstack-galera-0" Oct 01 15:18:21 crc kubenswrapper[4869]: I1001 15:18:21.416523 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/7980b3ae-7c3f-42ad-a9e4-9bfb05cb934a-kolla-config\") pod \"openstack-galera-0\" (UID: \"7980b3ae-7c3f-42ad-a9e4-9bfb05cb934a\") " pod="openstack/openstack-galera-0" Oct 01 15:18:21 crc kubenswrapper[4869]: I1001 15:18:21.416565 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"openstack-galera-0\" (UID: \"7980b3ae-7c3f-42ad-a9e4-9bfb05cb934a\") " pod="openstack/openstack-galera-0" Oct 01 15:18:21 crc kubenswrapper[4869]: I1001 15:18:21.416592 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7980b3ae-7c3f-42ad-a9e4-9bfb05cb934a-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"7980b3ae-7c3f-42ad-a9e4-9bfb05cb934a\") " pod="openstack/openstack-galera-0" Oct 01 15:18:21 crc kubenswrapper[4869]: I1001 15:18:21.416640 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/7980b3ae-7c3f-42ad-a9e4-9bfb05cb934a-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"7980b3ae-7c3f-42ad-a9e4-9bfb05cb934a\") " pod="openstack/openstack-galera-0" Oct 01 15:18:21 crc kubenswrapper[4869]: I1001 15:18:21.417065 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/7980b3ae-7c3f-42ad-a9e4-9bfb05cb934a-config-data-default\") pod \"openstack-galera-0\" (UID: \"7980b3ae-7c3f-42ad-a9e4-9bfb05cb934a\") " pod="openstack/openstack-galera-0" Oct 01 15:18:21 crc kubenswrapper[4869]: I1001 15:18:21.417120 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/7980b3ae-7c3f-42ad-a9e4-9bfb05cb934a-secrets\") pod \"openstack-galera-0\" (UID: \"7980b3ae-7c3f-42ad-a9e4-9bfb05cb934a\") " pod="openstack/openstack-galera-0" Oct 01 15:18:21 crc kubenswrapper[4869]: I1001 15:18:21.417222 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/7980b3ae-7c3f-42ad-a9e4-9bfb05cb934a-config-data-generated\") pod \"openstack-galera-0\" (UID: \"7980b3ae-7c3f-42ad-a9e4-9bfb05cb934a\") " pod="openstack/openstack-galera-0" Oct 01 15:18:21 crc kubenswrapper[4869]: I1001 15:18:21.417299 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ztlsf\" (UniqueName: \"kubernetes.io/projected/7980b3ae-7c3f-42ad-a9e4-9bfb05cb934a-kube-api-access-ztlsf\") pod \"openstack-galera-0\" (UID: \"7980b3ae-7c3f-42ad-a9e4-9bfb05cb934a\") " pod="openstack/openstack-galera-0" Oct 01 15:18:21 crc kubenswrapper[4869]: I1001 15:18:21.417087 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Oct 01 15:18:21 crc kubenswrapper[4869]: I1001 15:18:21.417778 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Oct 01 15:18:21 crc kubenswrapper[4869]: I1001 15:18:21.419975 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Oct 01 15:18:21 crc kubenswrapper[4869]: I1001 15:18:21.422566 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-lvzwc" Oct 01 15:18:21 crc kubenswrapper[4869]: I1001 15:18:21.430084 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Oct 01 15:18:21 crc kubenswrapper[4869]: I1001 15:18:21.518279 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/7980b3ae-7c3f-42ad-a9e4-9bfb05cb934a-config-data-default\") pod \"openstack-galera-0\" (UID: \"7980b3ae-7c3f-42ad-a9e4-9bfb05cb934a\") " pod="openstack/openstack-galera-0" Oct 01 15:18:21 crc kubenswrapper[4869]: I1001 15:18:21.518330 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/7980b3ae-7c3f-42ad-a9e4-9bfb05cb934a-secrets\") pod \"openstack-galera-0\" (UID: \"7980b3ae-7c3f-42ad-a9e4-9bfb05cb934a\") " pod="openstack/openstack-galera-0" Oct 01 15:18:21 crc kubenswrapper[4869]: I1001 15:18:21.518374 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: 
\"kubernetes.io/empty-dir/7980b3ae-7c3f-42ad-a9e4-9bfb05cb934a-config-data-generated\") pod \"openstack-galera-0\" (UID: \"7980b3ae-7c3f-42ad-a9e4-9bfb05cb934a\") " pod="openstack/openstack-galera-0" Oct 01 15:18:21 crc kubenswrapper[4869]: I1001 15:18:21.518404 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ztlsf\" (UniqueName: \"kubernetes.io/projected/7980b3ae-7c3f-42ad-a9e4-9bfb05cb934a-kube-api-access-ztlsf\") pod \"openstack-galera-0\" (UID: \"7980b3ae-7c3f-42ad-a9e4-9bfb05cb934a\") " pod="openstack/openstack-galera-0" Oct 01 15:18:21 crc kubenswrapper[4869]: I1001 15:18:21.518448 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7980b3ae-7c3f-42ad-a9e4-9bfb05cb934a-operator-scripts\") pod \"openstack-galera-0\" (UID: \"7980b3ae-7c3f-42ad-a9e4-9bfb05cb934a\") " pod="openstack/openstack-galera-0" Oct 01 15:18:21 crc kubenswrapper[4869]: I1001 15:18:21.518469 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/7980b3ae-7c3f-42ad-a9e4-9bfb05cb934a-kolla-config\") pod \"openstack-galera-0\" (UID: \"7980b3ae-7c3f-42ad-a9e4-9bfb05cb934a\") " pod="openstack/openstack-galera-0" Oct 01 15:18:21 crc kubenswrapper[4869]: I1001 15:18:21.518495 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"openstack-galera-0\" (UID: \"7980b3ae-7c3f-42ad-a9e4-9bfb05cb934a\") " pod="openstack/openstack-galera-0" Oct 01 15:18:21 crc kubenswrapper[4869]: I1001 15:18:21.518515 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7980b3ae-7c3f-42ad-a9e4-9bfb05cb934a-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"7980b3ae-7c3f-42ad-a9e4-9bfb05cb934a\") " pod="openstack/openstack-galera-0" Oct 01 15:18:21 crc kubenswrapper[4869]: I1001 15:18:21.518543 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/7980b3ae-7c3f-42ad-a9e4-9bfb05cb934a-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"7980b3ae-7c3f-42ad-a9e4-9bfb05cb934a\") " pod="openstack/openstack-galera-0" Oct 01 15:18:21 crc kubenswrapper[4869]: I1001 15:18:21.519164 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/7980b3ae-7c3f-42ad-a9e4-9bfb05cb934a-config-data-default\") pod \"openstack-galera-0\" (UID: \"7980b3ae-7c3f-42ad-a9e4-9bfb05cb934a\") " pod="openstack/openstack-galera-0" Oct 01 15:18:21 crc kubenswrapper[4869]: I1001 15:18:21.519865 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/7980b3ae-7c3f-42ad-a9e4-9bfb05cb934a-config-data-generated\") pod \"openstack-galera-0\" (UID: \"7980b3ae-7c3f-42ad-a9e4-9bfb05cb934a\") " pod="openstack/openstack-galera-0" Oct 01 15:18:21 crc kubenswrapper[4869]: I1001 15:18:21.519910 4869 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"openstack-galera-0\" (UID: \"7980b3ae-7c3f-42ad-a9e4-9bfb05cb934a\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/openstack-galera-0" Oct 01 
15:18:21 crc kubenswrapper[4869]: I1001 15:18:21.520371 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/7980b3ae-7c3f-42ad-a9e4-9bfb05cb934a-kolla-config\") pod \"openstack-galera-0\" (UID: \"7980b3ae-7c3f-42ad-a9e4-9bfb05cb934a\") " pod="openstack/openstack-galera-0" Oct 01 15:18:21 crc kubenswrapper[4869]: I1001 15:18:21.522406 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7980b3ae-7c3f-42ad-a9e4-9bfb05cb934a-operator-scripts\") pod \"openstack-galera-0\" (UID: \"7980b3ae-7c3f-42ad-a9e4-9bfb05cb934a\") " pod="openstack/openstack-galera-0" Oct 01 15:18:21 crc kubenswrapper[4869]: I1001 15:18:21.524581 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/7980b3ae-7c3f-42ad-a9e4-9bfb05cb934a-secrets\") pod \"openstack-galera-0\" (UID: \"7980b3ae-7c3f-42ad-a9e4-9bfb05cb934a\") " pod="openstack/openstack-galera-0" Oct 01 15:18:21 crc kubenswrapper[4869]: I1001 15:18:21.529253 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/7980b3ae-7c3f-42ad-a9e4-9bfb05cb934a-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"7980b3ae-7c3f-42ad-a9e4-9bfb05cb934a\") " pod="openstack/openstack-galera-0" Oct 01 15:18:21 crc kubenswrapper[4869]: I1001 15:18:21.530783 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7980b3ae-7c3f-42ad-a9e4-9bfb05cb934a-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"7980b3ae-7c3f-42ad-a9e4-9bfb05cb934a\") " pod="openstack/openstack-galera-0" Oct 01 15:18:21 crc kubenswrapper[4869]: I1001 15:18:21.539508 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"openstack-galera-0\" (UID: \"7980b3ae-7c3f-42ad-a9e4-9bfb05cb934a\") " pod="openstack/openstack-galera-0" Oct 01 15:18:21 crc kubenswrapper[4869]: I1001 15:18:21.539526 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ztlsf\" (UniqueName: \"kubernetes.io/projected/7980b3ae-7c3f-42ad-a9e4-9bfb05cb934a-kube-api-access-ztlsf\") pod \"openstack-galera-0\" (UID: \"7980b3ae-7c3f-42ad-a9e4-9bfb05cb934a\") " pod="openstack/openstack-galera-0" Oct 01 15:18:21 crc kubenswrapper[4869]: I1001 15:18:21.601976 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="152f2134-8a6e-4400-a2b5-978fe7d9f23f" path="/var/lib/kubelet/pods/152f2134-8a6e-4400-a2b5-978fe7d9f23f/volumes" Oct 01 15:18:21 crc kubenswrapper[4869]: I1001 15:18:21.620032 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/6c2b4697-41ce-422e-8602-a1c0190745df-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"6c2b4697-41ce-422e-8602-a1c0190745df\") " pod="openstack/openstack-cell1-galera-0" Oct 01 15:18:21 crc kubenswrapper[4869]: I1001 15:18:21.620113 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"openstack-cell1-galera-0\" (UID: \"6c2b4697-41ce-422e-8602-a1c0190745df\") " pod="openstack/openstack-cell1-galera-0" Oct 01 15:18:21 crc 
kubenswrapper[4869]: I1001 15:18:21.620165 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8kfhg\" (UniqueName: \"kubernetes.io/projected/6c2b4697-41ce-422e-8602-a1c0190745df-kube-api-access-8kfhg\") pod \"openstack-cell1-galera-0\" (UID: \"6c2b4697-41ce-422e-8602-a1c0190745df\") " pod="openstack/openstack-cell1-galera-0" Oct 01 15:18:21 crc kubenswrapper[4869]: I1001 15:18:21.620228 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6c2b4697-41ce-422e-8602-a1c0190745df-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"6c2b4697-41ce-422e-8602-a1c0190745df\") " pod="openstack/openstack-cell1-galera-0" Oct 01 15:18:21 crc kubenswrapper[4869]: I1001 15:18:21.620698 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/6c2b4697-41ce-422e-8602-a1c0190745df-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"6c2b4697-41ce-422e-8602-a1c0190745df\") " pod="openstack/openstack-cell1-galera-0" Oct 01 15:18:21 crc kubenswrapper[4869]: I1001 15:18:21.620803 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c2b4697-41ce-422e-8602-a1c0190745df-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"6c2b4697-41ce-422e-8602-a1c0190745df\") " pod="openstack/openstack-cell1-galera-0" Oct 01 15:18:21 crc kubenswrapper[4869]: I1001 15:18:21.621109 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/6c2b4697-41ce-422e-8602-a1c0190745df-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"6c2b4697-41ce-422e-8602-a1c0190745df\") " pod="openstack/openstack-cell1-galera-0" Oct 01 15:18:21 crc kubenswrapper[4869]: I1001 15:18:21.621299 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/6c2b4697-41ce-422e-8602-a1c0190745df-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"6c2b4697-41ce-422e-8602-a1c0190745df\") " pod="openstack/openstack-cell1-galera-0" Oct 01 15:18:21 crc kubenswrapper[4869]: I1001 15:18:21.621404 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/6c2b4697-41ce-422e-8602-a1c0190745df-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"6c2b4697-41ce-422e-8602-a1c0190745df\") " pod="openstack/openstack-cell1-galera-0" Oct 01 15:18:21 crc kubenswrapper[4869]: I1001 15:18:21.700588 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-galera-0" Oct 01 15:18:21 crc kubenswrapper[4869]: I1001 15:18:21.722725 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"openstack-cell1-galera-0\" (UID: \"6c2b4697-41ce-422e-8602-a1c0190745df\") " pod="openstack/openstack-cell1-galera-0" Oct 01 15:18:21 crc kubenswrapper[4869]: I1001 15:18:21.722812 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8kfhg\" (UniqueName: \"kubernetes.io/projected/6c2b4697-41ce-422e-8602-a1c0190745df-kube-api-access-8kfhg\") pod \"openstack-cell1-galera-0\" (UID: \"6c2b4697-41ce-422e-8602-a1c0190745df\") " pod="openstack/openstack-cell1-galera-0" Oct 01 15:18:21 crc kubenswrapper[4869]: I1001 15:18:21.722886 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6c2b4697-41ce-422e-8602-a1c0190745df-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"6c2b4697-41ce-422e-8602-a1c0190745df\") " pod="openstack/openstack-cell1-galera-0" Oct 01 15:18:21 crc kubenswrapper[4869]: I1001 15:18:21.722982 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/6c2b4697-41ce-422e-8602-a1c0190745df-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"6c2b4697-41ce-422e-8602-a1c0190745df\") " pod="openstack/openstack-cell1-galera-0" Oct 01 15:18:21 crc kubenswrapper[4869]: I1001 15:18:21.723065 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c2b4697-41ce-422e-8602-a1c0190745df-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"6c2b4697-41ce-422e-8602-a1c0190745df\") " pod="openstack/openstack-cell1-galera-0" Oct 01 15:18:21 crc kubenswrapper[4869]: I1001 15:18:21.723183 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/6c2b4697-41ce-422e-8602-a1c0190745df-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"6c2b4697-41ce-422e-8602-a1c0190745df\") " pod="openstack/openstack-cell1-galera-0" Oct 01 15:18:21 crc kubenswrapper[4869]: I1001 15:18:21.723229 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/6c2b4697-41ce-422e-8602-a1c0190745df-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"6c2b4697-41ce-422e-8602-a1c0190745df\") " pod="openstack/openstack-cell1-galera-0" Oct 01 15:18:21 crc kubenswrapper[4869]: I1001 15:18:21.723297 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/6c2b4697-41ce-422e-8602-a1c0190745df-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"6c2b4697-41ce-422e-8602-a1c0190745df\") " pod="openstack/openstack-cell1-galera-0" Oct 01 15:18:21 crc kubenswrapper[4869]: I1001 15:18:21.723386 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/6c2b4697-41ce-422e-8602-a1c0190745df-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"6c2b4697-41ce-422e-8602-a1c0190745df\") " pod="openstack/openstack-cell1-galera-0" Oct 01 15:18:21 crc kubenswrapper[4869]: I1001 15:18:21.725343 
4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/6c2b4697-41ce-422e-8602-a1c0190745df-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"6c2b4697-41ce-422e-8602-a1c0190745df\") " pod="openstack/openstack-cell1-galera-0" Oct 01 15:18:21 crc kubenswrapper[4869]: I1001 15:18:21.727293 4869 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"openstack-cell1-galera-0\" (UID: \"6c2b4697-41ce-422e-8602-a1c0190745df\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/openstack-cell1-galera-0" Oct 01 15:18:21 crc kubenswrapper[4869]: I1001 15:18:21.732165 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c2b4697-41ce-422e-8602-a1c0190745df-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"6c2b4697-41ce-422e-8602-a1c0190745df\") " pod="openstack/openstack-cell1-galera-0" Oct 01 15:18:21 crc kubenswrapper[4869]: I1001 15:18:21.733121 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/6c2b4697-41ce-422e-8602-a1c0190745df-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"6c2b4697-41ce-422e-8602-a1c0190745df\") " pod="openstack/openstack-cell1-galera-0" Oct 01 15:18:21 crc kubenswrapper[4869]: I1001 15:18:21.733402 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6c2b4697-41ce-422e-8602-a1c0190745df-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"6c2b4697-41ce-422e-8602-a1c0190745df\") " pod="openstack/openstack-cell1-galera-0" Oct 01 15:18:21 crc kubenswrapper[4869]: I1001 15:18:21.735075 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/6c2b4697-41ce-422e-8602-a1c0190745df-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"6c2b4697-41ce-422e-8602-a1c0190745df\") " pod="openstack/openstack-cell1-galera-0" Oct 01 15:18:21 crc kubenswrapper[4869]: I1001 15:18:21.743609 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/6c2b4697-41ce-422e-8602-a1c0190745df-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"6c2b4697-41ce-422e-8602-a1c0190745df\") " pod="openstack/openstack-cell1-galera-0" Oct 01 15:18:21 crc kubenswrapper[4869]: I1001 15:18:21.751459 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/6c2b4697-41ce-422e-8602-a1c0190745df-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"6c2b4697-41ce-422e-8602-a1c0190745df\") " pod="openstack/openstack-cell1-galera-0" Oct 01 15:18:21 crc kubenswrapper[4869]: I1001 15:18:21.761087 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8kfhg\" (UniqueName: \"kubernetes.io/projected/6c2b4697-41ce-422e-8602-a1c0190745df-kube-api-access-8kfhg\") pod \"openstack-cell1-galera-0\" (UID: \"6c2b4697-41ce-422e-8602-a1c0190745df\") " pod="openstack/openstack-cell1-galera-0" Oct 01 15:18:21 crc kubenswrapper[4869]: I1001 15:18:21.769069 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage10-crc\") pod \"openstack-cell1-galera-0\" (UID: \"6c2b4697-41ce-422e-8602-a1c0190745df\") " pod="openstack/openstack-cell1-galera-0" Oct 01 15:18:21 crc kubenswrapper[4869]: I1001 15:18:21.932027 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Oct 01 15:18:21 crc kubenswrapper[4869]: I1001 15:18:21.933028 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Oct 01 15:18:21 crc kubenswrapper[4869]: I1001 15:18:21.936705 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-dd5fb" Oct 01 15:18:21 crc kubenswrapper[4869]: I1001 15:18:21.936866 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc" Oct 01 15:18:21 crc kubenswrapper[4869]: I1001 15:18:21.936979 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Oct 01 15:18:21 crc kubenswrapper[4869]: I1001 15:18:21.949485 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Oct 01 15:18:22 crc kubenswrapper[4869]: I1001 15:18:22.044460 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Oct 01 15:18:22 crc kubenswrapper[4869]: I1001 15:18:22.128855 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/05395c1c-e984-4b4c-abb8-6309d6a961da-kolla-config\") pod \"memcached-0\" (UID: \"05395c1c-e984-4b4c-abb8-6309d6a961da\") " pod="openstack/memcached-0" Oct 01 15:18:22 crc kubenswrapper[4869]: I1001 15:18:22.128910 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kr76j\" (UniqueName: \"kubernetes.io/projected/05395c1c-e984-4b4c-abb8-6309d6a961da-kube-api-access-kr76j\") pod \"memcached-0\" (UID: \"05395c1c-e984-4b4c-abb8-6309d6a961da\") " pod="openstack/memcached-0" Oct 01 15:18:22 crc kubenswrapper[4869]: I1001 15:18:22.129233 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05395c1c-e984-4b4c-abb8-6309d6a961da-combined-ca-bundle\") pod \"memcached-0\" (UID: \"05395c1c-e984-4b4c-abb8-6309d6a961da\") " pod="openstack/memcached-0" Oct 01 15:18:22 crc kubenswrapper[4869]: I1001 15:18:22.129370 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/05395c1c-e984-4b4c-abb8-6309d6a961da-config-data\") pod \"memcached-0\" (UID: \"05395c1c-e984-4b4c-abb8-6309d6a961da\") " pod="openstack/memcached-0" Oct 01 15:18:22 crc kubenswrapper[4869]: I1001 15:18:22.129414 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/05395c1c-e984-4b4c-abb8-6309d6a961da-memcached-tls-certs\") pod \"memcached-0\" (UID: \"05395c1c-e984-4b4c-abb8-6309d6a961da\") " pod="openstack/memcached-0" Oct 01 15:18:22 crc kubenswrapper[4869]: I1001 15:18:22.230648 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kr76j\" (UniqueName: \"kubernetes.io/projected/05395c1c-e984-4b4c-abb8-6309d6a961da-kube-api-access-kr76j\") pod \"memcached-0\" (UID: \"05395c1c-e984-4b4c-abb8-6309d6a961da\") " 
pod="openstack/memcached-0" Oct 01 15:18:22 crc kubenswrapper[4869]: I1001 15:18:22.230768 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05395c1c-e984-4b4c-abb8-6309d6a961da-combined-ca-bundle\") pod \"memcached-0\" (UID: \"05395c1c-e984-4b4c-abb8-6309d6a961da\") " pod="openstack/memcached-0" Oct 01 15:18:22 crc kubenswrapper[4869]: I1001 15:18:22.230794 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/05395c1c-e984-4b4c-abb8-6309d6a961da-config-data\") pod \"memcached-0\" (UID: \"05395c1c-e984-4b4c-abb8-6309d6a961da\") " pod="openstack/memcached-0" Oct 01 15:18:22 crc kubenswrapper[4869]: I1001 15:18:22.230815 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/05395c1c-e984-4b4c-abb8-6309d6a961da-memcached-tls-certs\") pod \"memcached-0\" (UID: \"05395c1c-e984-4b4c-abb8-6309d6a961da\") " pod="openstack/memcached-0" Oct 01 15:18:22 crc kubenswrapper[4869]: I1001 15:18:22.230846 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/05395c1c-e984-4b4c-abb8-6309d6a961da-kolla-config\") pod \"memcached-0\" (UID: \"05395c1c-e984-4b4c-abb8-6309d6a961da\") " pod="openstack/memcached-0" Oct 01 15:18:22 crc kubenswrapper[4869]: I1001 15:18:22.231801 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/05395c1c-e984-4b4c-abb8-6309d6a961da-kolla-config\") pod \"memcached-0\" (UID: \"05395c1c-e984-4b4c-abb8-6309d6a961da\") " pod="openstack/memcached-0" Oct 01 15:18:22 crc kubenswrapper[4869]: I1001 15:18:22.231928 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/05395c1c-e984-4b4c-abb8-6309d6a961da-config-data\") pod \"memcached-0\" (UID: \"05395c1c-e984-4b4c-abb8-6309d6a961da\") " pod="openstack/memcached-0" Oct 01 15:18:22 crc kubenswrapper[4869]: I1001 15:18:22.234944 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05395c1c-e984-4b4c-abb8-6309d6a961da-combined-ca-bundle\") pod \"memcached-0\" (UID: \"05395c1c-e984-4b4c-abb8-6309d6a961da\") " pod="openstack/memcached-0" Oct 01 15:18:22 crc kubenswrapper[4869]: I1001 15:18:22.241625 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/05395c1c-e984-4b4c-abb8-6309d6a961da-memcached-tls-certs\") pod \"memcached-0\" (UID: \"05395c1c-e984-4b4c-abb8-6309d6a961da\") " pod="openstack/memcached-0" Oct 01 15:18:22 crc kubenswrapper[4869]: I1001 15:18:22.247689 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kr76j\" (UniqueName: \"kubernetes.io/projected/05395c1c-e984-4b4c-abb8-6309d6a961da-kube-api-access-kr76j\") pod \"memcached-0\" (UID: \"05395c1c-e984-4b4c-abb8-6309d6a961da\") " pod="openstack/memcached-0" Oct 01 15:18:22 crc kubenswrapper[4869]: I1001 15:18:22.257844 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/memcached-0" Oct 01 15:18:23 crc kubenswrapper[4869]: I1001 15:18:23.576041 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Oct 01 15:18:23 crc kubenswrapper[4869]: I1001 15:18:23.577194 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 01 15:18:23 crc kubenswrapper[4869]: I1001 15:18:23.579087 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-g74fv" Oct 01 15:18:23 crc kubenswrapper[4869]: I1001 15:18:23.589826 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 01 15:18:23 crc kubenswrapper[4869]: I1001 15:18:23.662907 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bbzt7\" (UniqueName: \"kubernetes.io/projected/589bdd72-e961-4bbd-bb04-bcff96363cab-kube-api-access-bbzt7\") pod \"kube-state-metrics-0\" (UID: \"589bdd72-e961-4bbd-bb04-bcff96363cab\") " pod="openstack/kube-state-metrics-0" Oct 01 15:18:23 crc kubenswrapper[4869]: I1001 15:18:23.763727 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bbzt7\" (UniqueName: \"kubernetes.io/projected/589bdd72-e961-4bbd-bb04-bcff96363cab-kube-api-access-bbzt7\") pod \"kube-state-metrics-0\" (UID: \"589bdd72-e961-4bbd-bb04-bcff96363cab\") " pod="openstack/kube-state-metrics-0" Oct 01 15:18:23 crc kubenswrapper[4869]: I1001 15:18:23.784958 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bbzt7\" (UniqueName: \"kubernetes.io/projected/589bdd72-e961-4bbd-bb04-bcff96363cab-kube-api-access-bbzt7\") pod \"kube-state-metrics-0\" (UID: \"589bdd72-e961-4bbd-bb04-bcff96363cab\") " pod="openstack/kube-state-metrics-0" Oct 01 15:18:23 crc kubenswrapper[4869]: I1001 15:18:23.914566 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 01 15:18:27 crc kubenswrapper[4869]: I1001 15:18:27.595972 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 01 15:18:27 crc kubenswrapper[4869]: I1001 15:18:27.913817 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-8xxqn"] Oct 01 15:18:27 crc kubenswrapper[4869]: I1001 15:18:27.914953 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-8xxqn" Oct 01 15:18:27 crc kubenswrapper[4869]: I1001 15:18:27.918802 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts" Oct 01 15:18:27 crc kubenswrapper[4869]: I1001 15:18:27.918828 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-tdtvc" Oct 01 15:18:27 crc kubenswrapper[4869]: I1001 15:18:27.919051 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovncontroller-ovndbs" Oct 01 15:18:27 crc kubenswrapper[4869]: I1001 15:18:27.921229 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-8xxqn"] Oct 01 15:18:27 crc kubenswrapper[4869]: I1001 15:18:27.948203 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-jhxfd"] Oct 01 15:18:27 crc kubenswrapper[4869]: I1001 15:18:27.952803 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-ovs-jhxfd" Oct 01 15:18:27 crc kubenswrapper[4869]: I1001 15:18:27.981803 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-jhxfd"] Oct 01 15:18:28 crc kubenswrapper[4869]: I1001 15:18:28.031612 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/fce7eb65-0111-43b3-9265-700c584695fa-var-run-ovn\") pod \"ovn-controller-8xxqn\" (UID: \"fce7eb65-0111-43b3-9265-700c584695fa\") " pod="openstack/ovn-controller-8xxqn" Oct 01 15:18:28 crc kubenswrapper[4869]: I1001 15:18:28.031708 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/01ddd573-45b0-4379-8b00-fa92a1da0ec1-scripts\") pod \"ovn-controller-ovs-jhxfd\" (UID: \"01ddd573-45b0-4379-8b00-fa92a1da0ec1\") " pod="openstack/ovn-controller-ovs-jhxfd" Oct 01 15:18:28 crc kubenswrapper[4869]: I1001 15:18:28.031734 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fce7eb65-0111-43b3-9265-700c584695fa-scripts\") pod \"ovn-controller-8xxqn\" (UID: \"fce7eb65-0111-43b3-9265-700c584695fa\") " pod="openstack/ovn-controller-8xxqn" Oct 01 15:18:28 crc kubenswrapper[4869]: I1001 15:18:28.031758 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fce7eb65-0111-43b3-9265-700c584695fa-combined-ca-bundle\") pod \"ovn-controller-8xxqn\" (UID: \"fce7eb65-0111-43b3-9265-700c584695fa\") " pod="openstack/ovn-controller-8xxqn" Oct 01 15:18:28 crc kubenswrapper[4869]: I1001 15:18:28.031798 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/01ddd573-45b0-4379-8b00-fa92a1da0ec1-var-run\") pod \"ovn-controller-ovs-jhxfd\" (UID: \"01ddd573-45b0-4379-8b00-fa92a1da0ec1\") " pod="openstack/ovn-controller-ovs-jhxfd" Oct 01 15:18:28 crc kubenswrapper[4869]: I1001 15:18:28.031820 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bbjlh\" (UniqueName: \"kubernetes.io/projected/01ddd573-45b0-4379-8b00-fa92a1da0ec1-kube-api-access-bbjlh\") pod \"ovn-controller-ovs-jhxfd\" (UID: \"01ddd573-45b0-4379-8b00-fa92a1da0ec1\") " pod="openstack/ovn-controller-ovs-jhxfd" Oct 01 15:18:28 crc kubenswrapper[4869]: I1001 15:18:28.031850 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/01ddd573-45b0-4379-8b00-fa92a1da0ec1-var-log\") pod \"ovn-controller-ovs-jhxfd\" (UID: \"01ddd573-45b0-4379-8b00-fa92a1da0ec1\") " pod="openstack/ovn-controller-ovs-jhxfd" Oct 01 15:18:28 crc kubenswrapper[4869]: I1001 15:18:28.031873 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/01ddd573-45b0-4379-8b00-fa92a1da0ec1-var-lib\") pod \"ovn-controller-ovs-jhxfd\" (UID: \"01ddd573-45b0-4379-8b00-fa92a1da0ec1\") " pod="openstack/ovn-controller-ovs-jhxfd" Oct 01 15:18:28 crc kubenswrapper[4869]: I1001 15:18:28.031904 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/fce7eb65-0111-43b3-9265-700c584695fa-ovn-controller-tls-certs\") pod \"ovn-controller-8xxqn\" (UID: \"fce7eb65-0111-43b3-9265-700c584695fa\") " pod="openstack/ovn-controller-8xxqn" Oct 01 15:18:28 crc kubenswrapper[4869]: I1001 15:18:28.031936 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/fce7eb65-0111-43b3-9265-700c584695fa-var-log-ovn\") pod \"ovn-controller-8xxqn\" (UID: \"fce7eb65-0111-43b3-9265-700c584695fa\") " pod="openstack/ovn-controller-8xxqn" Oct 01 15:18:28 crc kubenswrapper[4869]: I1001 15:18:28.031984 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xsx7w\" (UniqueName: \"kubernetes.io/projected/fce7eb65-0111-43b3-9265-700c584695fa-kube-api-access-xsx7w\") pod \"ovn-controller-8xxqn\" (UID: \"fce7eb65-0111-43b3-9265-700c584695fa\") " pod="openstack/ovn-controller-8xxqn" Oct 01 15:18:28 crc kubenswrapper[4869]: I1001 15:18:28.032007 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/fce7eb65-0111-43b3-9265-700c584695fa-var-run\") pod \"ovn-controller-8xxqn\" (UID: \"fce7eb65-0111-43b3-9265-700c584695fa\") " pod="openstack/ovn-controller-8xxqn" Oct 01 15:18:28 crc kubenswrapper[4869]: I1001 15:18:28.032029 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/01ddd573-45b0-4379-8b00-fa92a1da0ec1-etc-ovs\") pod \"ovn-controller-ovs-jhxfd\" (UID: \"01ddd573-45b0-4379-8b00-fa92a1da0ec1\") " pod="openstack/ovn-controller-ovs-jhxfd" Oct 01 15:18:28 crc kubenswrapper[4869]: I1001 15:18:28.133885 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/fce7eb65-0111-43b3-9265-700c584695fa-ovn-controller-tls-certs\") pod \"ovn-controller-8xxqn\" (UID: \"fce7eb65-0111-43b3-9265-700c584695fa\") " pod="openstack/ovn-controller-8xxqn" Oct 01 15:18:28 crc kubenswrapper[4869]: I1001 15:18:28.133948 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/fce7eb65-0111-43b3-9265-700c584695fa-var-log-ovn\") pod \"ovn-controller-8xxqn\" (UID: \"fce7eb65-0111-43b3-9265-700c584695fa\") " pod="openstack/ovn-controller-8xxqn" Oct 01 15:18:28 crc kubenswrapper[4869]: I1001 15:18:28.133992 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xsx7w\" (UniqueName: \"kubernetes.io/projected/fce7eb65-0111-43b3-9265-700c584695fa-kube-api-access-xsx7w\") pod \"ovn-controller-8xxqn\" (UID: \"fce7eb65-0111-43b3-9265-700c584695fa\") " pod="openstack/ovn-controller-8xxqn" Oct 01 15:18:28 crc kubenswrapper[4869]: I1001 15:18:28.134013 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/fce7eb65-0111-43b3-9265-700c584695fa-var-run\") pod \"ovn-controller-8xxqn\" (UID: \"fce7eb65-0111-43b3-9265-700c584695fa\") " pod="openstack/ovn-controller-8xxqn" Oct 01 15:18:28 crc kubenswrapper[4869]: I1001 15:18:28.134031 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/01ddd573-45b0-4379-8b00-fa92a1da0ec1-etc-ovs\") pod \"ovn-controller-ovs-jhxfd\" (UID: 
\"01ddd573-45b0-4379-8b00-fa92a1da0ec1\") " pod="openstack/ovn-controller-ovs-jhxfd" Oct 01 15:18:28 crc kubenswrapper[4869]: I1001 15:18:28.134064 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/fce7eb65-0111-43b3-9265-700c584695fa-var-run-ovn\") pod \"ovn-controller-8xxqn\" (UID: \"fce7eb65-0111-43b3-9265-700c584695fa\") " pod="openstack/ovn-controller-8xxqn" Oct 01 15:18:28 crc kubenswrapper[4869]: I1001 15:18:28.134079 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/01ddd573-45b0-4379-8b00-fa92a1da0ec1-scripts\") pod \"ovn-controller-ovs-jhxfd\" (UID: \"01ddd573-45b0-4379-8b00-fa92a1da0ec1\") " pod="openstack/ovn-controller-ovs-jhxfd" Oct 01 15:18:28 crc kubenswrapper[4869]: I1001 15:18:28.134094 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fce7eb65-0111-43b3-9265-700c584695fa-scripts\") pod \"ovn-controller-8xxqn\" (UID: \"fce7eb65-0111-43b3-9265-700c584695fa\") " pod="openstack/ovn-controller-8xxqn" Oct 01 15:18:28 crc kubenswrapper[4869]: I1001 15:18:28.134114 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fce7eb65-0111-43b3-9265-700c584695fa-combined-ca-bundle\") pod \"ovn-controller-8xxqn\" (UID: \"fce7eb65-0111-43b3-9265-700c584695fa\") " pod="openstack/ovn-controller-8xxqn" Oct 01 15:18:28 crc kubenswrapper[4869]: I1001 15:18:28.134141 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/01ddd573-45b0-4379-8b00-fa92a1da0ec1-var-run\") pod \"ovn-controller-ovs-jhxfd\" (UID: \"01ddd573-45b0-4379-8b00-fa92a1da0ec1\") " pod="openstack/ovn-controller-ovs-jhxfd" Oct 01 15:18:28 crc kubenswrapper[4869]: I1001 15:18:28.134160 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bbjlh\" (UniqueName: \"kubernetes.io/projected/01ddd573-45b0-4379-8b00-fa92a1da0ec1-kube-api-access-bbjlh\") pod \"ovn-controller-ovs-jhxfd\" (UID: \"01ddd573-45b0-4379-8b00-fa92a1da0ec1\") " pod="openstack/ovn-controller-ovs-jhxfd" Oct 01 15:18:28 crc kubenswrapper[4869]: I1001 15:18:28.134186 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/01ddd573-45b0-4379-8b00-fa92a1da0ec1-var-log\") pod \"ovn-controller-ovs-jhxfd\" (UID: \"01ddd573-45b0-4379-8b00-fa92a1da0ec1\") " pod="openstack/ovn-controller-ovs-jhxfd" Oct 01 15:18:28 crc kubenswrapper[4869]: I1001 15:18:28.134211 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/01ddd573-45b0-4379-8b00-fa92a1da0ec1-var-lib\") pod \"ovn-controller-ovs-jhxfd\" (UID: \"01ddd573-45b0-4379-8b00-fa92a1da0ec1\") " pod="openstack/ovn-controller-ovs-jhxfd" Oct 01 15:18:28 crc kubenswrapper[4869]: I1001 15:18:28.134728 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/01ddd573-45b0-4379-8b00-fa92a1da0ec1-var-lib\") pod \"ovn-controller-ovs-jhxfd\" (UID: \"01ddd573-45b0-4379-8b00-fa92a1da0ec1\") " pod="openstack/ovn-controller-ovs-jhxfd" Oct 01 15:18:28 crc kubenswrapper[4869]: I1001 15:18:28.134857 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/fce7eb65-0111-43b3-9265-700c584695fa-var-log-ovn\") pod \"ovn-controller-8xxqn\" (UID: \"fce7eb65-0111-43b3-9265-700c584695fa\") " pod="openstack/ovn-controller-8xxqn" Oct 01 15:18:28 crc kubenswrapper[4869]: I1001 15:18:28.135231 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/fce7eb65-0111-43b3-9265-700c584695fa-var-run\") pod \"ovn-controller-8xxqn\" (UID: \"fce7eb65-0111-43b3-9265-700c584695fa\") " pod="openstack/ovn-controller-8xxqn" Oct 01 15:18:28 crc kubenswrapper[4869]: I1001 15:18:28.135351 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/01ddd573-45b0-4379-8b00-fa92a1da0ec1-etc-ovs\") pod \"ovn-controller-ovs-jhxfd\" (UID: \"01ddd573-45b0-4379-8b00-fa92a1da0ec1\") " pod="openstack/ovn-controller-ovs-jhxfd" Oct 01 15:18:28 crc kubenswrapper[4869]: I1001 15:18:28.135426 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/fce7eb65-0111-43b3-9265-700c584695fa-var-run-ovn\") pod \"ovn-controller-8xxqn\" (UID: \"fce7eb65-0111-43b3-9265-700c584695fa\") " pod="openstack/ovn-controller-8xxqn" Oct 01 15:18:28 crc kubenswrapper[4869]: I1001 15:18:28.135525 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/01ddd573-45b0-4379-8b00-fa92a1da0ec1-var-run\") pod \"ovn-controller-ovs-jhxfd\" (UID: \"01ddd573-45b0-4379-8b00-fa92a1da0ec1\") " pod="openstack/ovn-controller-ovs-jhxfd" Oct 01 15:18:28 crc kubenswrapper[4869]: I1001 15:18:28.137830 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fce7eb65-0111-43b3-9265-700c584695fa-scripts\") pod \"ovn-controller-8xxqn\" (UID: \"fce7eb65-0111-43b3-9265-700c584695fa\") " pod="openstack/ovn-controller-8xxqn" Oct 01 15:18:28 crc kubenswrapper[4869]: I1001 15:18:28.140275 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/01ddd573-45b0-4379-8b00-fa92a1da0ec1-scripts\") pod \"ovn-controller-ovs-jhxfd\" (UID: \"01ddd573-45b0-4379-8b00-fa92a1da0ec1\") " pod="openstack/ovn-controller-ovs-jhxfd" Oct 01 15:18:28 crc kubenswrapper[4869]: I1001 15:18:28.140360 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/fce7eb65-0111-43b3-9265-700c584695fa-ovn-controller-tls-certs\") pod \"ovn-controller-8xxqn\" (UID: \"fce7eb65-0111-43b3-9265-700c584695fa\") " pod="openstack/ovn-controller-8xxqn" Oct 01 15:18:28 crc kubenswrapper[4869]: I1001 15:18:28.140591 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/01ddd573-45b0-4379-8b00-fa92a1da0ec1-var-log\") pod \"ovn-controller-ovs-jhxfd\" (UID: \"01ddd573-45b0-4379-8b00-fa92a1da0ec1\") " pod="openstack/ovn-controller-ovs-jhxfd" Oct 01 15:18:28 crc kubenswrapper[4869]: I1001 15:18:28.141567 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fce7eb65-0111-43b3-9265-700c584695fa-combined-ca-bundle\") pod \"ovn-controller-8xxqn\" (UID: \"fce7eb65-0111-43b3-9265-700c584695fa\") " pod="openstack/ovn-controller-8xxqn" Oct 01 15:18:28 crc kubenswrapper[4869]: I1001 15:18:28.149937 4869 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xsx7w\" (UniqueName: \"kubernetes.io/projected/fce7eb65-0111-43b3-9265-700c584695fa-kube-api-access-xsx7w\") pod \"ovn-controller-8xxqn\" (UID: \"fce7eb65-0111-43b3-9265-700c584695fa\") " pod="openstack/ovn-controller-8xxqn" Oct 01 15:18:28 crc kubenswrapper[4869]: I1001 15:18:28.168941 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bbjlh\" (UniqueName: \"kubernetes.io/projected/01ddd573-45b0-4379-8b00-fa92a1da0ec1-kube-api-access-bbjlh\") pod \"ovn-controller-ovs-jhxfd\" (UID: \"01ddd573-45b0-4379-8b00-fa92a1da0ec1\") " pod="openstack/ovn-controller-ovs-jhxfd" Oct 01 15:18:28 crc kubenswrapper[4869]: I1001 15:18:28.246569 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-8xxqn" Oct 01 15:18:28 crc kubenswrapper[4869]: I1001 15:18:28.271672 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-jhxfd" Oct 01 15:18:30 crc kubenswrapper[4869]: I1001 15:18:30.368424 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"] Oct 01 15:18:30 crc kubenswrapper[4869]: I1001 15:18:30.369778 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Oct 01 15:18:30 crc kubenswrapper[4869]: I1001 15:18:30.373879 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs" Oct 01 15:18:30 crc kubenswrapper[4869]: I1001 15:18:30.373914 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config" Oct 01 15:18:30 crc kubenswrapper[4869]: I1001 15:18:30.373981 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-m678b" Oct 01 15:18:30 crc kubenswrapper[4869]: I1001 15:18:30.374085 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovn-metrics" Oct 01 15:18:30 crc kubenswrapper[4869]: I1001 15:18:30.374196 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts" Oct 01 15:18:30 crc kubenswrapper[4869]: I1001 15:18:30.385514 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Oct 01 15:18:30 crc kubenswrapper[4869]: I1001 15:18:30.565969 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5733eb7d-fef5-44fd-8d97-68660485d909-config\") pod \"ovsdbserver-nb-0\" (UID: \"5733eb7d-fef5-44fd-8d97-68660485d909\") " pod="openstack/ovsdbserver-nb-0" Oct 01 15:18:30 crc kubenswrapper[4869]: I1001 15:18:30.566015 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/5733eb7d-fef5-44fd-8d97-68660485d909-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"5733eb7d-fef5-44fd-8d97-68660485d909\") " pod="openstack/ovsdbserver-nb-0" Oct 01 15:18:30 crc kubenswrapper[4869]: I1001 15:18:30.566047 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/5733eb7d-fef5-44fd-8d97-68660485d909-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"5733eb7d-fef5-44fd-8d97-68660485d909\") " pod="openstack/ovsdbserver-nb-0" Oct 01 15:18:30 crc 
kubenswrapper[4869]: I1001 15:18:30.566116 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/5733eb7d-fef5-44fd-8d97-68660485d909-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"5733eb7d-fef5-44fd-8d97-68660485d909\") " pod="openstack/ovsdbserver-nb-0" Oct 01 15:18:30 crc kubenswrapper[4869]: I1001 15:18:30.566136 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5733eb7d-fef5-44fd-8d97-68660485d909-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"5733eb7d-fef5-44fd-8d97-68660485d909\") " pod="openstack/ovsdbserver-nb-0" Oct 01 15:18:30 crc kubenswrapper[4869]: I1001 15:18:30.566152 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5733eb7d-fef5-44fd-8d97-68660485d909-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"5733eb7d-fef5-44fd-8d97-68660485d909\") " pod="openstack/ovsdbserver-nb-0" Oct 01 15:18:30 crc kubenswrapper[4869]: I1001 15:18:30.566171 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lv89n\" (UniqueName: \"kubernetes.io/projected/5733eb7d-fef5-44fd-8d97-68660485d909-kube-api-access-lv89n\") pod \"ovsdbserver-nb-0\" (UID: \"5733eb7d-fef5-44fd-8d97-68660485d909\") " pod="openstack/ovsdbserver-nb-0" Oct 01 15:18:30 crc kubenswrapper[4869]: I1001 15:18:30.566219 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"ovsdbserver-nb-0\" (UID: \"5733eb7d-fef5-44fd-8d97-68660485d909\") " pod="openstack/ovsdbserver-nb-0" Oct 01 15:18:30 crc kubenswrapper[4869]: I1001 15:18:30.667703 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/5733eb7d-fef5-44fd-8d97-68660485d909-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"5733eb7d-fef5-44fd-8d97-68660485d909\") " pod="openstack/ovsdbserver-nb-0" Oct 01 15:18:30 crc kubenswrapper[4869]: I1001 15:18:30.667805 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/5733eb7d-fef5-44fd-8d97-68660485d909-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"5733eb7d-fef5-44fd-8d97-68660485d909\") " pod="openstack/ovsdbserver-nb-0" Oct 01 15:18:30 crc kubenswrapper[4869]: I1001 15:18:30.667836 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5733eb7d-fef5-44fd-8d97-68660485d909-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"5733eb7d-fef5-44fd-8d97-68660485d909\") " pod="openstack/ovsdbserver-nb-0" Oct 01 15:18:30 crc kubenswrapper[4869]: I1001 15:18:30.667862 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5733eb7d-fef5-44fd-8d97-68660485d909-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"5733eb7d-fef5-44fd-8d97-68660485d909\") " pod="openstack/ovsdbserver-nb-0" Oct 01 15:18:30 crc kubenswrapper[4869]: I1001 15:18:30.667888 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lv89n\" (UniqueName: 
\"kubernetes.io/projected/5733eb7d-fef5-44fd-8d97-68660485d909-kube-api-access-lv89n\") pod \"ovsdbserver-nb-0\" (UID: \"5733eb7d-fef5-44fd-8d97-68660485d909\") " pod="openstack/ovsdbserver-nb-0" Oct 01 15:18:30 crc kubenswrapper[4869]: I1001 15:18:30.667954 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"ovsdbserver-nb-0\" (UID: \"5733eb7d-fef5-44fd-8d97-68660485d909\") " pod="openstack/ovsdbserver-nb-0" Oct 01 15:18:30 crc kubenswrapper[4869]: I1001 15:18:30.667998 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5733eb7d-fef5-44fd-8d97-68660485d909-config\") pod \"ovsdbserver-nb-0\" (UID: \"5733eb7d-fef5-44fd-8d97-68660485d909\") " pod="openstack/ovsdbserver-nb-0" Oct 01 15:18:30 crc kubenswrapper[4869]: I1001 15:18:30.668027 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/5733eb7d-fef5-44fd-8d97-68660485d909-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"5733eb7d-fef5-44fd-8d97-68660485d909\") " pod="openstack/ovsdbserver-nb-0" Oct 01 15:18:30 crc kubenswrapper[4869]: I1001 15:18:30.669077 4869 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"ovsdbserver-nb-0\" (UID: \"5733eb7d-fef5-44fd-8d97-68660485d909\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/ovsdbserver-nb-0" Oct 01 15:18:30 crc kubenswrapper[4869]: I1001 15:18:30.669821 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5733eb7d-fef5-44fd-8d97-68660485d909-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"5733eb7d-fef5-44fd-8d97-68660485d909\") " pod="openstack/ovsdbserver-nb-0" Oct 01 15:18:30 crc kubenswrapper[4869]: I1001 15:18:30.670177 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/5733eb7d-fef5-44fd-8d97-68660485d909-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"5733eb7d-fef5-44fd-8d97-68660485d909\") " pod="openstack/ovsdbserver-nb-0" Oct 01 15:18:30 crc kubenswrapper[4869]: I1001 15:18:30.671709 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5733eb7d-fef5-44fd-8d97-68660485d909-config\") pod \"ovsdbserver-nb-0\" (UID: \"5733eb7d-fef5-44fd-8d97-68660485d909\") " pod="openstack/ovsdbserver-nb-0" Oct 01 15:18:30 crc kubenswrapper[4869]: I1001 15:18:30.675403 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5733eb7d-fef5-44fd-8d97-68660485d909-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"5733eb7d-fef5-44fd-8d97-68660485d909\") " pod="openstack/ovsdbserver-nb-0" Oct 01 15:18:30 crc kubenswrapper[4869]: I1001 15:18:30.689967 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/5733eb7d-fef5-44fd-8d97-68660485d909-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"5733eb7d-fef5-44fd-8d97-68660485d909\") " pod="openstack/ovsdbserver-nb-0" Oct 01 15:18:30 crc kubenswrapper[4869]: I1001 15:18:30.694868 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/5733eb7d-fef5-44fd-8d97-68660485d909-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"5733eb7d-fef5-44fd-8d97-68660485d909\") " pod="openstack/ovsdbserver-nb-0" Oct 01 15:18:30 crc kubenswrapper[4869]: I1001 15:18:30.695608 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lv89n\" (UniqueName: \"kubernetes.io/projected/5733eb7d-fef5-44fd-8d97-68660485d909-kube-api-access-lv89n\") pod \"ovsdbserver-nb-0\" (UID: \"5733eb7d-fef5-44fd-8d97-68660485d909\") " pod="openstack/ovsdbserver-nb-0" Oct 01 15:18:30 crc kubenswrapper[4869]: I1001 15:18:30.715733 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"ovsdbserver-nb-0\" (UID: \"5733eb7d-fef5-44fd-8d97-68660485d909\") " pod="openstack/ovsdbserver-nb-0" Oct 01 15:18:31 crc kubenswrapper[4869]: I1001 15:18:31.015688 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Oct 01 15:18:31 crc kubenswrapper[4869]: I1001 15:18:31.463719 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"] Oct 01 15:18:31 crc kubenswrapper[4869]: I1001 15:18:31.465078 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Oct 01 15:18:31 crc kubenswrapper[4869]: I1001 15:18:31.467987 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs" Oct 01 15:18:31 crc kubenswrapper[4869]: I1001 15:18:31.468129 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-d6vnb" Oct 01 15:18:31 crc kubenswrapper[4869]: I1001 15:18:31.470465 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Oct 01 15:18:31 crc kubenswrapper[4869]: I1001 15:18:31.471515 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Oct 01 15:18:31 crc kubenswrapper[4869]: I1001 15:18:31.479527 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Oct 01 15:18:31 crc kubenswrapper[4869]: I1001 15:18:31.599481 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/b4bcb7f8-7b79-424f-9b67-e341b25a5ac1-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"b4bcb7f8-7b79-424f-9b67-e341b25a5ac1\") " pod="openstack/ovsdbserver-sb-0" Oct 01 15:18:31 crc kubenswrapper[4869]: I1001 15:18:31.599620 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b4bcb7f8-7b79-424f-9b67-e341b25a5ac1-config\") pod \"ovsdbserver-sb-0\" (UID: \"b4bcb7f8-7b79-424f-9b67-e341b25a5ac1\") " pod="openstack/ovsdbserver-sb-0" Oct 01 15:18:31 crc kubenswrapper[4869]: I1001 15:18:31.599647 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/b4bcb7f8-7b79-424f-9b67-e341b25a5ac1-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"b4bcb7f8-7b79-424f-9b67-e341b25a5ac1\") " pod="openstack/ovsdbserver-sb-0" Oct 01 15:18:31 crc kubenswrapper[4869]: I1001 15:18:31.599665 4869 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b4bcb7f8-7b79-424f-9b67-e341b25a5ac1-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"b4bcb7f8-7b79-424f-9b67-e341b25a5ac1\") " pod="openstack/ovsdbserver-sb-0" Oct 01 15:18:31 crc kubenswrapper[4869]: I1001 15:18:31.599885 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fbdkb\" (UniqueName: \"kubernetes.io/projected/b4bcb7f8-7b79-424f-9b67-e341b25a5ac1-kube-api-access-fbdkb\") pod \"ovsdbserver-sb-0\" (UID: \"b4bcb7f8-7b79-424f-9b67-e341b25a5ac1\") " pod="openstack/ovsdbserver-sb-0" Oct 01 15:18:31 crc kubenswrapper[4869]: I1001 15:18:31.600006 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4bcb7f8-7b79-424f-9b67-e341b25a5ac1-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"b4bcb7f8-7b79-424f-9b67-e341b25a5ac1\") " pod="openstack/ovsdbserver-sb-0" Oct 01 15:18:31 crc kubenswrapper[4869]: I1001 15:18:31.600075 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/b4bcb7f8-7b79-424f-9b67-e341b25a5ac1-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"b4bcb7f8-7b79-424f-9b67-e341b25a5ac1\") " pod="openstack/ovsdbserver-sb-0" Oct 01 15:18:31 crc kubenswrapper[4869]: I1001 15:18:31.600232 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"ovsdbserver-sb-0\" (UID: \"b4bcb7f8-7b79-424f-9b67-e341b25a5ac1\") " pod="openstack/ovsdbserver-sb-0" Oct 01 15:18:31 crc kubenswrapper[4869]: I1001 15:18:31.702312 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/b4bcb7f8-7b79-424f-9b67-e341b25a5ac1-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"b4bcb7f8-7b79-424f-9b67-e341b25a5ac1\") " pod="openstack/ovsdbserver-sb-0" Oct 01 15:18:31 crc kubenswrapper[4869]: I1001 15:18:31.702484 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b4bcb7f8-7b79-424f-9b67-e341b25a5ac1-config\") pod \"ovsdbserver-sb-0\" (UID: \"b4bcb7f8-7b79-424f-9b67-e341b25a5ac1\") " pod="openstack/ovsdbserver-sb-0" Oct 01 15:18:31 crc kubenswrapper[4869]: I1001 15:18:31.702548 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/b4bcb7f8-7b79-424f-9b67-e341b25a5ac1-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"b4bcb7f8-7b79-424f-9b67-e341b25a5ac1\") " pod="openstack/ovsdbserver-sb-0" Oct 01 15:18:31 crc kubenswrapper[4869]: I1001 15:18:31.702578 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b4bcb7f8-7b79-424f-9b67-e341b25a5ac1-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"b4bcb7f8-7b79-424f-9b67-e341b25a5ac1\") " pod="openstack/ovsdbserver-sb-0" Oct 01 15:18:31 crc kubenswrapper[4869]: I1001 15:18:31.702753 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fbdkb\" (UniqueName: \"kubernetes.io/projected/b4bcb7f8-7b79-424f-9b67-e341b25a5ac1-kube-api-access-fbdkb\") pod 
\"ovsdbserver-sb-0\" (UID: \"b4bcb7f8-7b79-424f-9b67-e341b25a5ac1\") " pod="openstack/ovsdbserver-sb-0" Oct 01 15:18:31 crc kubenswrapper[4869]: I1001 15:18:31.702799 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4bcb7f8-7b79-424f-9b67-e341b25a5ac1-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"b4bcb7f8-7b79-424f-9b67-e341b25a5ac1\") " pod="openstack/ovsdbserver-sb-0" Oct 01 15:18:31 crc kubenswrapper[4869]: I1001 15:18:31.702851 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/b4bcb7f8-7b79-424f-9b67-e341b25a5ac1-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"b4bcb7f8-7b79-424f-9b67-e341b25a5ac1\") " pod="openstack/ovsdbserver-sb-0" Oct 01 15:18:31 crc kubenswrapper[4869]: I1001 15:18:31.702980 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"ovsdbserver-sb-0\" (UID: \"b4bcb7f8-7b79-424f-9b67-e341b25a5ac1\") " pod="openstack/ovsdbserver-sb-0" Oct 01 15:18:31 crc kubenswrapper[4869]: I1001 15:18:31.703206 4869 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"ovsdbserver-sb-0\" (UID: \"b4bcb7f8-7b79-424f-9b67-e341b25a5ac1\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/ovsdbserver-sb-0" Oct 01 15:18:31 crc kubenswrapper[4869]: I1001 15:18:31.704022 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b4bcb7f8-7b79-424f-9b67-e341b25a5ac1-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"b4bcb7f8-7b79-424f-9b67-e341b25a5ac1\") " pod="openstack/ovsdbserver-sb-0" Oct 01 15:18:31 crc kubenswrapper[4869]: I1001 15:18:31.704345 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b4bcb7f8-7b79-424f-9b67-e341b25a5ac1-config\") pod \"ovsdbserver-sb-0\" (UID: \"b4bcb7f8-7b79-424f-9b67-e341b25a5ac1\") " pod="openstack/ovsdbserver-sb-0" Oct 01 15:18:31 crc kubenswrapper[4869]: I1001 15:18:31.704693 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/b4bcb7f8-7b79-424f-9b67-e341b25a5ac1-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"b4bcb7f8-7b79-424f-9b67-e341b25a5ac1\") " pod="openstack/ovsdbserver-sb-0" Oct 01 15:18:31 crc kubenswrapper[4869]: I1001 15:18:31.708274 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/b4bcb7f8-7b79-424f-9b67-e341b25a5ac1-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"b4bcb7f8-7b79-424f-9b67-e341b25a5ac1\") " pod="openstack/ovsdbserver-sb-0" Oct 01 15:18:31 crc kubenswrapper[4869]: I1001 15:18:31.708462 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4bcb7f8-7b79-424f-9b67-e341b25a5ac1-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"b4bcb7f8-7b79-424f-9b67-e341b25a5ac1\") " pod="openstack/ovsdbserver-sb-0" Oct 01 15:18:31 crc kubenswrapper[4869]: I1001 15:18:31.709056 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/b4bcb7f8-7b79-424f-9b67-e341b25a5ac1-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"b4bcb7f8-7b79-424f-9b67-e341b25a5ac1\") " pod="openstack/ovsdbserver-sb-0" Oct 01 15:18:31 crc kubenswrapper[4869]: I1001 15:18:31.717476 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fbdkb\" (UniqueName: \"kubernetes.io/projected/b4bcb7f8-7b79-424f-9b67-e341b25a5ac1-kube-api-access-fbdkb\") pod \"ovsdbserver-sb-0\" (UID: \"b4bcb7f8-7b79-424f-9b67-e341b25a5ac1\") " pod="openstack/ovsdbserver-sb-0" Oct 01 15:18:31 crc kubenswrapper[4869]: I1001 15:18:31.736510 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"ovsdbserver-sb-0\" (UID: \"b4bcb7f8-7b79-424f-9b67-e341b25a5ac1\") " pod="openstack/ovsdbserver-sb-0" Oct 01 15:18:31 crc kubenswrapper[4869]: I1001 15:18:31.796157 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Oct 01 15:18:33 crc kubenswrapper[4869]: I1001 15:18:33.561121 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"589bdd72-e961-4bbd-bb04-bcff96363cab","Type":"ContainerStarted","Data":"e8b64caed0813fb5ed0821adfcb99cde3a792c21c19b3342252fc0afc3d446fe"} Oct 01 15:18:33 crc kubenswrapper[4869]: E1001 15:18:33.741399 4869 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:6276771339c90f342673dcaf7faa8c46e2c0ece62ed5efc4b7d65a095dabe07b" Oct 01 15:18:33 crc kubenswrapper[4869]: E1001 15:18:33.741577 4869 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:6276771339c90f342673dcaf7faa8c46e2c0ece62ed5efc4b7d65a095dabe07b,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nffh5bdhf4h5f8h79h55h77h58fh56dh7bh6fh578hbch55dh68h56bhd9h65dh57ch658hc9h566h666h688h58h65dh684h5d7h6ch575h5d6h88q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-tkzln,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-b8b69cf79-z8c6l_openstack(e90c4d77-8795-43ca-8c46-a3d86440cde6): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 01 15:18:33 crc kubenswrapper[4869]: E1001 15:18:33.743679 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-b8b69cf79-z8c6l" podUID="e90c4d77-8795-43ca-8c46-a3d86440cde6" Oct 01 15:18:33 crc kubenswrapper[4869]: E1001 15:18:33.897155 4869 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:6276771339c90f342673dcaf7faa8c46e2c0ece62ed5efc4b7d65a095dabe07b" Oct 01 15:18:33 crc kubenswrapper[4869]: E1001 15:18:33.897740 4869 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:6276771339c90f342673dcaf7faa8c46e2c0ece62ed5efc4b7d65a095dabe07b,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:ndfhb5h667h568h584h5f9h58dh565h664h587h597h577h64bh5c4h66fh647hbdh68ch5c5h68dh686h5f7h64hd7hc6h55fh57bh98h57fh87h5fh57fq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-qz84c,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-d5f6f49c7-dqwqq_openstack(0cdcbca4-2cf3-4cb7-a859-82c4e39d2f7d): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 01 15:18:33 crc kubenswrapper[4869]: E1001 15:18:33.905881 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-d5f6f49c7-dqwqq" podUID="0cdcbca4-2cf3-4cb7-a859-82c4e39d2f7d" Oct 01 15:18:34 crc kubenswrapper[4869]: I1001 15:18:34.253784 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Oct 01 15:18:34 crc kubenswrapper[4869]: I1001 15:18:34.260935 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Oct 01 15:18:34 crc kubenswrapper[4869]: I1001 15:18:34.475611 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Oct 01 15:18:34 crc kubenswrapper[4869]: I1001 15:18:34.556428 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-jhxfd"] Oct 01 15:18:34 crc kubenswrapper[4869]: I1001 15:18:34.579430 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"05395c1c-e984-4b4c-abb8-6309d6a961da","Type":"ContainerStarted","Data":"6554f2ac1985b9f47b18dbb8bfff8aa8cf05025ea230d846922135d08ffc98f3"} Oct 01 15:18:34 crc kubenswrapper[4869]: I1001 15:18:34.590641 4869 generic.go:334] "Generic (PLEG): container finished" podID="13f951e9-b94f-41aa-9547-59e5a0eff174" containerID="d21f5097758972bcc4d3cd7d2c517908e5b9fccfe8a3fd2e5fa7f5b83820dd32" exitCode=0 Oct 01 15:18:34 crc kubenswrapper[4869]: I1001 15:18:34.590747 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/dnsmasq-dns-77795d58f5-px8hm" event={"ID":"13f951e9-b94f-41aa-9547-59e5a0eff174","Type":"ContainerDied","Data":"d21f5097758972bcc4d3cd7d2c517908e5b9fccfe8a3fd2e5fa7f5b83820dd32"} Oct 01 15:18:34 crc kubenswrapper[4869]: I1001 15:18:34.595425 4869 generic.go:334] "Generic (PLEG): container finished" podID="99606142-049c-4f0a-813f-b7274041ec9e" containerID="b1031aa26ddab06e7931b5cd99c357fb7b68cb5730dfcbf047153cf7ef708d62" exitCode=0 Oct 01 15:18:34 crc kubenswrapper[4869]: I1001 15:18:34.595467 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b6f94bdfc-k4rnx" event={"ID":"99606142-049c-4f0a-813f-b7274041ec9e","Type":"ContainerDied","Data":"b1031aa26ddab06e7931b5cd99c357fb7b68cb5730dfcbf047153cf7ef708d62"} Oct 01 15:18:34 crc kubenswrapper[4869]: I1001 15:18:34.602721 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-8xxqn"] Oct 01 15:18:34 crc kubenswrapper[4869]: I1001 15:18:34.715797 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Oct 01 15:18:34 crc kubenswrapper[4869]: W1001 15:18:34.963737 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7980b3ae_7c3f_42ad_a9e4_9bfb05cb934a.slice/crio-0b59d36fc594a6a799aada538f166565554633dae0f4812ba1b362b4d8dac310 WatchSource:0}: Error finding container 0b59d36fc594a6a799aada538f166565554633dae0f4812ba1b362b4d8dac310: Status 404 returned error can't find the container with id 0b59d36fc594a6a799aada538f166565554633dae0f4812ba1b362b4d8dac310 Oct 01 15:18:34 crc kubenswrapper[4869]: W1001 15:18:34.976699 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb4bcb7f8_7b79_424f_9b67_e341b25a5ac1.slice/crio-478e055dc909934d2c16d3f8428a6f20c443fd53f5ffba130fc600da04553e51 WatchSource:0}: Error finding container 478e055dc909934d2c16d3f8428a6f20c443fd53f5ffba130fc600da04553e51: Status 404 returned error can't find the container with id 478e055dc909934d2c16d3f8428a6f20c443fd53f5ffba130fc600da04553e51 Oct 01 15:18:35 crc kubenswrapper[4869]: I1001 15:18:35.080449 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-d5f6f49c7-dqwqq" Oct 01 15:18:35 crc kubenswrapper[4869]: I1001 15:18:35.084727 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-b8b69cf79-z8c6l" Oct 01 15:18:35 crc kubenswrapper[4869]: I1001 15:18:35.251384 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Oct 01 15:18:35 crc kubenswrapper[4869]: W1001 15:18:35.271799 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5733eb7d_fef5_44fd_8d97_68660485d909.slice/crio-e253e67a08757ac88fc9f4f9cb9cabc56f26fda997f6f2b73f7386e91c35be6d WatchSource:0}: Error finding container e253e67a08757ac88fc9f4f9cb9cabc56f26fda997f6f2b73f7386e91c35be6d: Status 404 returned error can't find the container with id e253e67a08757ac88fc9f4f9cb9cabc56f26fda997f6f2b73f7386e91c35be6d Oct 01 15:18:35 crc kubenswrapper[4869]: I1001 15:18:35.275235 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e90c4d77-8795-43ca-8c46-a3d86440cde6-config\") pod \"e90c4d77-8795-43ca-8c46-a3d86440cde6\" (UID: \"e90c4d77-8795-43ca-8c46-a3d86440cde6\") " Oct 01 15:18:35 crc kubenswrapper[4869]: I1001 15:18:35.275307 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qz84c\" (UniqueName: \"kubernetes.io/projected/0cdcbca4-2cf3-4cb7-a859-82c4e39d2f7d-kube-api-access-qz84c\") pod \"0cdcbca4-2cf3-4cb7-a859-82c4e39d2f7d\" (UID: \"0cdcbca4-2cf3-4cb7-a859-82c4e39d2f7d\") " Oct 01 15:18:35 crc kubenswrapper[4869]: I1001 15:18:35.275362 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0cdcbca4-2cf3-4cb7-a859-82c4e39d2f7d-dns-svc\") pod \"0cdcbca4-2cf3-4cb7-a859-82c4e39d2f7d\" (UID: \"0cdcbca4-2cf3-4cb7-a859-82c4e39d2f7d\") " Oct 01 15:18:35 crc kubenswrapper[4869]: I1001 15:18:35.275431 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tkzln\" (UniqueName: \"kubernetes.io/projected/e90c4d77-8795-43ca-8c46-a3d86440cde6-kube-api-access-tkzln\") pod \"e90c4d77-8795-43ca-8c46-a3d86440cde6\" (UID: \"e90c4d77-8795-43ca-8c46-a3d86440cde6\") " Oct 01 15:18:35 crc kubenswrapper[4869]: I1001 15:18:35.275523 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0cdcbca4-2cf3-4cb7-a859-82c4e39d2f7d-config\") pod \"0cdcbca4-2cf3-4cb7-a859-82c4e39d2f7d\" (UID: \"0cdcbca4-2cf3-4cb7-a859-82c4e39d2f7d\") " Oct 01 15:18:35 crc kubenswrapper[4869]: I1001 15:18:35.275582 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e90c4d77-8795-43ca-8c46-a3d86440cde6-config" (OuterVolumeSpecName: "config") pod "e90c4d77-8795-43ca-8c46-a3d86440cde6" (UID: "e90c4d77-8795-43ca-8c46-a3d86440cde6"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:18:35 crc kubenswrapper[4869]: I1001 15:18:35.275893 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0cdcbca4-2cf3-4cb7-a859-82c4e39d2f7d-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "0cdcbca4-2cf3-4cb7-a859-82c4e39d2f7d" (UID: "0cdcbca4-2cf3-4cb7-a859-82c4e39d2f7d"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:18:35 crc kubenswrapper[4869]: I1001 15:18:35.275911 4869 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e90c4d77-8795-43ca-8c46-a3d86440cde6-config\") on node \"crc\" DevicePath \"\"" Oct 01 15:18:35 crc kubenswrapper[4869]: I1001 15:18:35.275959 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0cdcbca4-2cf3-4cb7-a859-82c4e39d2f7d-config" (OuterVolumeSpecName: "config") pod "0cdcbca4-2cf3-4cb7-a859-82c4e39d2f7d" (UID: "0cdcbca4-2cf3-4cb7-a859-82c4e39d2f7d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:18:35 crc kubenswrapper[4869]: I1001 15:18:35.280094 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e90c4d77-8795-43ca-8c46-a3d86440cde6-kube-api-access-tkzln" (OuterVolumeSpecName: "kube-api-access-tkzln") pod "e90c4d77-8795-43ca-8c46-a3d86440cde6" (UID: "e90c4d77-8795-43ca-8c46-a3d86440cde6"). InnerVolumeSpecName "kube-api-access-tkzln". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:18:35 crc kubenswrapper[4869]: I1001 15:18:35.280688 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0cdcbca4-2cf3-4cb7-a859-82c4e39d2f7d-kube-api-access-qz84c" (OuterVolumeSpecName: "kube-api-access-qz84c") pod "0cdcbca4-2cf3-4cb7-a859-82c4e39d2f7d" (UID: "0cdcbca4-2cf3-4cb7-a859-82c4e39d2f7d"). InnerVolumeSpecName "kube-api-access-qz84c". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:18:35 crc kubenswrapper[4869]: I1001 15:18:35.377464 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tkzln\" (UniqueName: \"kubernetes.io/projected/e90c4d77-8795-43ca-8c46-a3d86440cde6-kube-api-access-tkzln\") on node \"crc\" DevicePath \"\"" Oct 01 15:18:35 crc kubenswrapper[4869]: I1001 15:18:35.377484 4869 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0cdcbca4-2cf3-4cb7-a859-82c4e39d2f7d-config\") on node \"crc\" DevicePath \"\"" Oct 01 15:18:35 crc kubenswrapper[4869]: I1001 15:18:35.377494 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qz84c\" (UniqueName: \"kubernetes.io/projected/0cdcbca4-2cf3-4cb7-a859-82c4e39d2f7d-kube-api-access-qz84c\") on node \"crc\" DevicePath \"\"" Oct 01 15:18:35 crc kubenswrapper[4869]: I1001 15:18:35.377504 4869 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0cdcbca4-2cf3-4cb7-a859-82c4e39d2f7d-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 01 15:18:35 crc kubenswrapper[4869]: I1001 15:18:35.603011 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8b69cf79-z8c6l" event={"ID":"e90c4d77-8795-43ca-8c46-a3d86440cde6","Type":"ContainerDied","Data":"45a3f2e2797c54276b2b267a2aaa4240f927b367c067090a33adab2a277413b8"} Oct 01 15:18:35 crc kubenswrapper[4869]: I1001 15:18:35.603126 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b8b69cf79-z8c6l" Oct 01 15:18:35 crc kubenswrapper[4869]: I1001 15:18:35.609775 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-d5f6f49c7-dqwqq" Oct 01 15:18:35 crc kubenswrapper[4869]: I1001 15:18:35.609776 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-d5f6f49c7-dqwqq" event={"ID":"0cdcbca4-2cf3-4cb7-a859-82c4e39d2f7d","Type":"ContainerDied","Data":"18d82e31aa98d64c33da199a271fdc69c25257e8b4263d748102f87269e913a4"} Oct 01 15:18:35 crc kubenswrapper[4869]: I1001 15:18:35.610971 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"b4bcb7f8-7b79-424f-9b67-e341b25a5ac1","Type":"ContainerStarted","Data":"478e055dc909934d2c16d3f8428a6f20c443fd53f5ffba130fc600da04553e51"} Oct 01 15:18:35 crc kubenswrapper[4869]: I1001 15:18:35.612834 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-jhxfd" event={"ID":"01ddd573-45b0-4379-8b00-fa92a1da0ec1","Type":"ContainerStarted","Data":"95b0357421448b8b5e7b7417509cdf0c3eb85684e5496226d1f1dbf8bd46af54"} Oct 01 15:18:35 crc kubenswrapper[4869]: I1001 15:18:35.613679 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-8xxqn" event={"ID":"fce7eb65-0111-43b3-9265-700c584695fa","Type":"ContainerStarted","Data":"42057e6d16a3480c6680ff9f82e800838003204a88d3a73caebed2b6fc52c4e9"} Oct 01 15:18:35 crc kubenswrapper[4869]: I1001 15:18:35.614341 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"6c2b4697-41ce-422e-8602-a1c0190745df","Type":"ContainerStarted","Data":"8313af19543f938b7288d76dbe8ea7279832d109426082663462a7e872778d83"} Oct 01 15:18:35 crc kubenswrapper[4869]: I1001 15:18:35.618582 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"5733eb7d-fef5-44fd-8d97-68660485d909","Type":"ContainerStarted","Data":"e253e67a08757ac88fc9f4f9cb9cabc56f26fda997f6f2b73f7386e91c35be6d"} Oct 01 15:18:35 crc kubenswrapper[4869]: I1001 15:18:35.620860 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"7980b3ae-7c3f-42ad-a9e4-9bfb05cb934a","Type":"ContainerStarted","Data":"0b59d36fc594a6a799aada538f166565554633dae0f4812ba1b362b4d8dac310"} Oct 01 15:18:35 crc kubenswrapper[4869]: I1001 15:18:35.622831 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"004ab312-4718-4cf2-80df-5a2b1eccc301","Type":"ContainerStarted","Data":"d20e4a80d05da9996c50b83728ad8a401a62da2d044eb3737994a92551baa863"} Oct 01 15:18:35 crc kubenswrapper[4869]: I1001 15:18:35.626037 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"25c1e81e-fa0e-4ec6-b29c-bda2529fde66","Type":"ContainerStarted","Data":"676eea2624cce6e1eb68bf2c8575b8ec869fb8ee8184f7395a0ca6650eae7267"} Oct 01 15:18:35 crc kubenswrapper[4869]: I1001 15:18:35.710293 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-b8b69cf79-z8c6l"] Oct 01 15:18:35 crc kubenswrapper[4869]: I1001 15:18:35.725814 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-b8b69cf79-z8c6l"] Oct 01 15:18:35 crc kubenswrapper[4869]: I1001 15:18:35.735723 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-d5f6f49c7-dqwqq"] Oct 01 15:18:35 crc kubenswrapper[4869]: I1001 15:18:35.739756 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-d5f6f49c7-dqwqq"] Oct 01 15:18:37 crc kubenswrapper[4869]: I1001 15:18:37.594787 4869 
kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0cdcbca4-2cf3-4cb7-a859-82c4e39d2f7d" path="/var/lib/kubelet/pods/0cdcbca4-2cf3-4cb7-a859-82c4e39d2f7d/volumes" Oct 01 15:18:37 crc kubenswrapper[4869]: I1001 15:18:37.595630 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e90c4d77-8795-43ca-8c46-a3d86440cde6" path="/var/lib/kubelet/pods/e90c4d77-8795-43ca-8c46-a3d86440cde6/volumes" Oct 01 15:18:41 crc kubenswrapper[4869]: I1001 15:18:41.684967 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"589bdd72-e961-4bbd-bb04-bcff96363cab","Type":"ContainerStarted","Data":"796cb2602fbe212a6a0fff1c0b6dfa43ce47db0f54b471aa1a94dfc4b889263d"} Oct 01 15:18:41 crc kubenswrapper[4869]: I1001 15:18:41.686011 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Oct 01 15:18:41 crc kubenswrapper[4869]: I1001 15:18:41.688060 4869 generic.go:334] "Generic (PLEG): container finished" podID="01ddd573-45b0-4379-8b00-fa92a1da0ec1" containerID="1327917cf1afe504812fd22cb470c3afbe719a144a250d63cf1b2879cebb9ff9" exitCode=0 Oct 01 15:18:41 crc kubenswrapper[4869]: I1001 15:18:41.688120 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-jhxfd" event={"ID":"01ddd573-45b0-4379-8b00-fa92a1da0ec1","Type":"ContainerDied","Data":"1327917cf1afe504812fd22cb470c3afbe719a144a250d63cf1b2879cebb9ff9"} Oct 01 15:18:41 crc kubenswrapper[4869]: I1001 15:18:41.691491 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77795d58f5-px8hm" event={"ID":"13f951e9-b94f-41aa-9547-59e5a0eff174","Type":"ContainerStarted","Data":"7b47e75163355a6bece22860cef0d589bce3ff7b50e7904014e7ddfe621e6a94"} Oct 01 15:18:41 crc kubenswrapper[4869]: I1001 15:18:41.691550 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-77795d58f5-px8hm" Oct 01 15:18:41 crc kubenswrapper[4869]: I1001 15:18:41.694427 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"b4bcb7f8-7b79-424f-9b67-e341b25a5ac1","Type":"ContainerStarted","Data":"bcbce201e2bd342ad64bd86cab1fad6af8523f7eb08ef49a9f8640de2deedd62"} Oct 01 15:18:41 crc kubenswrapper[4869]: I1001 15:18:41.696803 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b6f94bdfc-k4rnx" event={"ID":"99606142-049c-4f0a-813f-b7274041ec9e","Type":"ContainerStarted","Data":"c4854cdef0a1800b9f1f9dea538cb6c50b87b8428e065b0d2ac80d6b35dcaba0"} Oct 01 15:18:41 crc kubenswrapper[4869]: I1001 15:18:41.696930 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-b6f94bdfc-k4rnx" Oct 01 15:18:41 crc kubenswrapper[4869]: I1001 15:18:41.698754 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"05395c1c-e984-4b4c-abb8-6309d6a961da","Type":"ContainerStarted","Data":"0e385b9f0be7b5440177c48ffb28c0ca50ebfcf229a11de72a522b35a12783e5"} Oct 01 15:18:41 crc kubenswrapper[4869]: I1001 15:18:41.698820 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Oct 01 15:18:41 crc kubenswrapper[4869]: I1001 15:18:41.706244 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-8xxqn" event={"ID":"fce7eb65-0111-43b3-9265-700c584695fa","Type":"ContainerStarted","Data":"3bf2d49dd0c1ccbe8c2ebac580a1de0226212fc46b517ef9a538577da1274370"} Oct 01 15:18:41 crc 
kubenswrapper[4869]: I1001 15:18:41.706985 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-8xxqn" Oct 01 15:18:41 crc kubenswrapper[4869]: I1001 15:18:41.709150 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"5733eb7d-fef5-44fd-8d97-68660485d909","Type":"ContainerStarted","Data":"30414e7f56c0fef4de4b74e1b26af94ff4d6cda84ffe3335270139443115b285"} Oct 01 15:18:41 crc kubenswrapper[4869]: I1001 15:18:41.711465 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"6c2b4697-41ce-422e-8602-a1c0190745df","Type":"ContainerStarted","Data":"d44c870df751e711d042efc41fa93260666b944de8bc1691caedf429501bb873"} Oct 01 15:18:41 crc kubenswrapper[4869]: I1001 15:18:41.712979 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"7980b3ae-7c3f-42ad-a9e4-9bfb05cb934a","Type":"ContainerStarted","Data":"5cdc6dfe681c9a60e5d01f168a75b2981cf09023a124a667760188d0ce1d5aa5"} Oct 01 15:18:41 crc kubenswrapper[4869]: I1001 15:18:41.771340 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=14.886917822000001 podStartE2EDuration="20.771311373s" podCreationTimestamp="2025-10-01 15:18:21 +0000 UTC" firstStartedPulling="2025-10-01 15:18:34.488375648 +0000 UTC m=+823.635218764" lastFinishedPulling="2025-10-01 15:18:40.372769189 +0000 UTC m=+829.519612315" observedRunningTime="2025-10-01 15:18:41.764676595 +0000 UTC m=+830.911519721" watchObservedRunningTime="2025-10-01 15:18:41.771311373 +0000 UTC m=+830.918154509" Oct 01 15:18:41 crc kubenswrapper[4869]: I1001 15:18:41.827900 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-b6f94bdfc-k4rnx" podStartSLOduration=9.02934885 podStartE2EDuration="24.827883052s" podCreationTimestamp="2025-10-01 15:18:17 +0000 UTC" firstStartedPulling="2025-10-01 15:18:18.081299976 +0000 UTC m=+807.228143092" lastFinishedPulling="2025-10-01 15:18:33.879834178 +0000 UTC m=+823.026677294" observedRunningTime="2025-10-01 15:18:41.827250826 +0000 UTC m=+830.974093942" watchObservedRunningTime="2025-10-01 15:18:41.827883052 +0000 UTC m=+830.974726158" Oct 01 15:18:41 crc kubenswrapper[4869]: I1001 15:18:41.842695 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=13.014643407 podStartE2EDuration="18.842668995s" podCreationTimestamp="2025-10-01 15:18:23 +0000 UTC" firstStartedPulling="2025-10-01 15:18:33.0060906 +0000 UTC m=+822.152933716" lastFinishedPulling="2025-10-01 15:18:38.834116188 +0000 UTC m=+827.980959304" observedRunningTime="2025-10-01 15:18:41.842037479 +0000 UTC m=+830.988880595" watchObservedRunningTime="2025-10-01 15:18:41.842668995 +0000 UTC m=+830.989512141" Oct 01 15:18:41 crc kubenswrapper[4869]: I1001 15:18:41.862139 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-77795d58f5-px8hm" podStartSLOduration=9.165050696 podStartE2EDuration="24.862122126s" podCreationTimestamp="2025-10-01 15:18:17 +0000 UTC" firstStartedPulling="2025-10-01 15:18:18.181122187 +0000 UTC m=+807.327965303" lastFinishedPulling="2025-10-01 15:18:33.878193607 +0000 UTC m=+823.025036733" observedRunningTime="2025-10-01 15:18:41.857492719 +0000 UTC m=+831.004335845" watchObservedRunningTime="2025-10-01 15:18:41.862122126 +0000 UTC m=+831.008965242" Oct 01 15:18:41 
crc kubenswrapper[4869]: I1001 15:18:41.897047 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-8xxqn" podStartSLOduration=9.187324809 podStartE2EDuration="14.897026458s" podCreationTimestamp="2025-10-01 15:18:27 +0000 UTC" firstStartedPulling="2025-10-01 15:18:34.974222329 +0000 UTC m=+824.121065485" lastFinishedPulling="2025-10-01 15:18:40.683924018 +0000 UTC m=+829.830767134" observedRunningTime="2025-10-01 15:18:41.894405602 +0000 UTC m=+831.041248718" watchObservedRunningTime="2025-10-01 15:18:41.897026458 +0000 UTC m=+831.043869574" Oct 01 15:18:42 crc kubenswrapper[4869]: I1001 15:18:42.724359 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-jhxfd" event={"ID":"01ddd573-45b0-4379-8b00-fa92a1da0ec1","Type":"ContainerStarted","Data":"d4950838a26ec6c2964bc540460035a421bd654406f9fdef7f2a6ff04b7bef73"} Oct 01 15:18:42 crc kubenswrapper[4869]: I1001 15:18:42.724704 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-jhxfd" event={"ID":"01ddd573-45b0-4379-8b00-fa92a1da0ec1","Type":"ContainerStarted","Data":"9cd6be38b55d56b4b8f8e067d714f51ab28207d225cfc945bcacc9a279f88a6a"} Oct 01 15:18:42 crc kubenswrapper[4869]: I1001 15:18:42.758131 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-jhxfd" podStartSLOduration=10.100948814 podStartE2EDuration="15.758106566s" podCreationTimestamp="2025-10-01 15:18:27 +0000 UTC" firstStartedPulling="2025-10-01 15:18:34.981016701 +0000 UTC m=+824.127859847" lastFinishedPulling="2025-10-01 15:18:40.638174493 +0000 UTC m=+829.785017599" observedRunningTime="2025-10-01 15:18:42.753563732 +0000 UTC m=+831.900406888" watchObservedRunningTime="2025-10-01 15:18:42.758106566 +0000 UTC m=+831.904949692" Oct 01 15:18:43 crc kubenswrapper[4869]: I1001 15:18:43.272480 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-jhxfd" Oct 01 15:18:43 crc kubenswrapper[4869]: I1001 15:18:43.272528 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-jhxfd" Oct 01 15:18:48 crc kubenswrapper[4869]: I1001 15:18:47.259563 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0" Oct 01 15:18:48 crc kubenswrapper[4869]: I1001 15:18:47.486510 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-b6f94bdfc-k4rnx" Oct 01 15:18:48 crc kubenswrapper[4869]: I1001 15:18:47.720433 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-77795d58f5-px8hm" Oct 01 15:18:48 crc kubenswrapper[4869]: I1001 15:18:47.773634 4869 generic.go:334] "Generic (PLEG): container finished" podID="6c2b4697-41ce-422e-8602-a1c0190745df" containerID="d44c870df751e711d042efc41fa93260666b944de8bc1691caedf429501bb873" exitCode=0 Oct 01 15:18:48 crc kubenswrapper[4869]: I1001 15:18:47.773710 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"6c2b4697-41ce-422e-8602-a1c0190745df","Type":"ContainerDied","Data":"d44c870df751e711d042efc41fa93260666b944de8bc1691caedf429501bb873"} Oct 01 15:18:48 crc kubenswrapper[4869]: I1001 15:18:47.776426 4869 generic.go:334] "Generic (PLEG): container finished" podID="7980b3ae-7c3f-42ad-a9e4-9bfb05cb934a" containerID="5cdc6dfe681c9a60e5d01f168a75b2981cf09023a124a667760188d0ce1d5aa5" exitCode=0 Oct 01 15:18:48 crc 
kubenswrapper[4869]: I1001 15:18:47.776471 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"7980b3ae-7c3f-42ad-a9e4-9bfb05cb934a","Type":"ContainerDied","Data":"5cdc6dfe681c9a60e5d01f168a75b2981cf09023a124a667760188d0ce1d5aa5"} Oct 01 15:18:48 crc kubenswrapper[4869]: I1001 15:18:47.788300 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-b6f94bdfc-k4rnx"] Oct 01 15:18:48 crc kubenswrapper[4869]: I1001 15:18:47.788529 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-b6f94bdfc-k4rnx" podUID="99606142-049c-4f0a-813f-b7274041ec9e" containerName="dnsmasq-dns" containerID="cri-o://c4854cdef0a1800b9f1f9dea538cb6c50b87b8428e065b0d2ac80d6b35dcaba0" gracePeriod=10 Oct 01 15:18:48 crc kubenswrapper[4869]: I1001 15:18:48.788643 4869 generic.go:334] "Generic (PLEG): container finished" podID="99606142-049c-4f0a-813f-b7274041ec9e" containerID="c4854cdef0a1800b9f1f9dea538cb6c50b87b8428e065b0d2ac80d6b35dcaba0" exitCode=0 Oct 01 15:18:48 crc kubenswrapper[4869]: I1001 15:18:48.788842 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b6f94bdfc-k4rnx" event={"ID":"99606142-049c-4f0a-813f-b7274041ec9e","Type":"ContainerDied","Data":"c4854cdef0a1800b9f1f9dea538cb6c50b87b8428e065b0d2ac80d6b35dcaba0"} Oct 01 15:18:49 crc kubenswrapper[4869]: I1001 15:18:49.109939 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b6f94bdfc-k4rnx" Oct 01 15:18:49 crc kubenswrapper[4869]: I1001 15:18:49.249792 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/99606142-049c-4f0a-813f-b7274041ec9e-dns-svc\") pod \"99606142-049c-4f0a-813f-b7274041ec9e\" (UID: \"99606142-049c-4f0a-813f-b7274041ec9e\") " Oct 01 15:18:49 crc kubenswrapper[4869]: I1001 15:18:49.253503 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/99606142-049c-4f0a-813f-b7274041ec9e-config\") pod \"99606142-049c-4f0a-813f-b7274041ec9e\" (UID: \"99606142-049c-4f0a-813f-b7274041ec9e\") " Oct 01 15:18:49 crc kubenswrapper[4869]: I1001 15:18:49.253600 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qjgzs\" (UniqueName: \"kubernetes.io/projected/99606142-049c-4f0a-813f-b7274041ec9e-kube-api-access-qjgzs\") pod \"99606142-049c-4f0a-813f-b7274041ec9e\" (UID: \"99606142-049c-4f0a-813f-b7274041ec9e\") " Oct 01 15:18:49 crc kubenswrapper[4869]: I1001 15:18:49.257536 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/99606142-049c-4f0a-813f-b7274041ec9e-kube-api-access-qjgzs" (OuterVolumeSpecName: "kube-api-access-qjgzs") pod "99606142-049c-4f0a-813f-b7274041ec9e" (UID: "99606142-049c-4f0a-813f-b7274041ec9e"). InnerVolumeSpecName "kube-api-access-qjgzs". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:18:49 crc kubenswrapper[4869]: I1001 15:18:49.309856 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/99606142-049c-4f0a-813f-b7274041ec9e-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "99606142-049c-4f0a-813f-b7274041ec9e" (UID: "99606142-049c-4f0a-813f-b7274041ec9e"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:18:49 crc kubenswrapper[4869]: I1001 15:18:49.310612 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/99606142-049c-4f0a-813f-b7274041ec9e-config" (OuterVolumeSpecName: "config") pod "99606142-049c-4f0a-813f-b7274041ec9e" (UID: "99606142-049c-4f0a-813f-b7274041ec9e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:18:49 crc kubenswrapper[4869]: I1001 15:18:49.355994 4869 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/99606142-049c-4f0a-813f-b7274041ec9e-config\") on node \"crc\" DevicePath \"\"" Oct 01 15:18:49 crc kubenswrapper[4869]: I1001 15:18:49.356023 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qjgzs\" (UniqueName: \"kubernetes.io/projected/99606142-049c-4f0a-813f-b7274041ec9e-kube-api-access-qjgzs\") on node \"crc\" DevicePath \"\"" Oct 01 15:18:49 crc kubenswrapper[4869]: I1001 15:18:49.356036 4869 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/99606142-049c-4f0a-813f-b7274041ec9e-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 01 15:18:49 crc kubenswrapper[4869]: I1001 15:18:49.807121 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b6f94bdfc-k4rnx" event={"ID":"99606142-049c-4f0a-813f-b7274041ec9e","Type":"ContainerDied","Data":"604b836dade644e0d905b316842453a925f045a7e823ea9d4aaafec617a49a58"} Oct 01 15:18:49 crc kubenswrapper[4869]: I1001 15:18:49.807172 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b6f94bdfc-k4rnx" Oct 01 15:18:49 crc kubenswrapper[4869]: I1001 15:18:49.807509 4869 scope.go:117] "RemoveContainer" containerID="c4854cdef0a1800b9f1f9dea538cb6c50b87b8428e065b0d2ac80d6b35dcaba0" Oct 01 15:18:49 crc kubenswrapper[4869]: I1001 15:18:49.810034 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"7980b3ae-7c3f-42ad-a9e4-9bfb05cb934a","Type":"ContainerStarted","Data":"8fc9f9871d4d4ef92b5229032e528f03c2fc2d22908c3b6a36bcfba55881b2c2"} Oct 01 15:18:49 crc kubenswrapper[4869]: I1001 15:18:49.813636 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"5733eb7d-fef5-44fd-8d97-68660485d909","Type":"ContainerStarted","Data":"22b9f08617f953207ca4d4d9b5c13ce1a731b672606d5db822ba4cd1a70ea580"} Oct 01 15:18:49 crc kubenswrapper[4869]: I1001 15:18:49.817918 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"6c2b4697-41ce-422e-8602-a1c0190745df","Type":"ContainerStarted","Data":"7e2093f3bccb90b763f84f11adc4960a000130a7c02448aa28fc59ac31957618"} Oct 01 15:18:49 crc kubenswrapper[4869]: I1001 15:18:49.823813 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"b4bcb7f8-7b79-424f-9b67-e341b25a5ac1","Type":"ContainerStarted","Data":"7af5ca3d983a4a5b0b9c2c2e5184547e52dd1eeae72577f13db3a36809478042"} Oct 01 15:18:49 crc kubenswrapper[4869]: I1001 15:18:49.833524 4869 scope.go:117] "RemoveContainer" containerID="b1031aa26ddab06e7931b5cd99c357fb7b68cb5730dfcbf047153cf7ef708d62" Oct 01 15:18:49 crc kubenswrapper[4869]: I1001 15:18:49.842533 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=6.964075191 
podStartE2EDuration="20.842519627s" podCreationTimestamp="2025-10-01 15:18:29 +0000 UTC" firstStartedPulling="2025-10-01 15:18:35.274990626 +0000 UTC m=+824.421833742" lastFinishedPulling="2025-10-01 15:18:49.153435062 +0000 UTC m=+838.300278178" observedRunningTime="2025-10-01 15:18:49.839511371 +0000 UTC m=+838.986354487" watchObservedRunningTime="2025-10-01 15:18:49.842519627 +0000 UTC m=+838.989362743" Oct 01 15:18:49 crc kubenswrapper[4869]: I1001 15:18:49.878148 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=24.123171214 podStartE2EDuration="29.878124376s" podCreationTimestamp="2025-10-01 15:18:20 +0000 UTC" firstStartedPulling="2025-10-01 15:18:34.967638093 +0000 UTC m=+824.114481209" lastFinishedPulling="2025-10-01 15:18:40.722591255 +0000 UTC m=+829.869434371" observedRunningTime="2025-10-01 15:18:49.869426316 +0000 UTC m=+839.016269432" watchObservedRunningTime="2025-10-01 15:18:49.878124376 +0000 UTC m=+839.024967502" Oct 01 15:18:49 crc kubenswrapper[4869]: I1001 15:18:49.888652 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-b6f94bdfc-k4rnx"] Oct 01 15:18:49 crc kubenswrapper[4869]: I1001 15:18:49.892404 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-b6f94bdfc-k4rnx"] Oct 01 15:18:49 crc kubenswrapper[4869]: I1001 15:18:49.906573 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=5.7563618640000005 podStartE2EDuration="19.906542214s" podCreationTimestamp="2025-10-01 15:18:30 +0000 UTC" firstStartedPulling="2025-10-01 15:18:34.980558929 +0000 UTC m=+824.127402085" lastFinishedPulling="2025-10-01 15:18:49.130739319 +0000 UTC m=+838.277582435" observedRunningTime="2025-10-01 15:18:49.899429064 +0000 UTC m=+839.046272210" watchObservedRunningTime="2025-10-01 15:18:49.906542214 +0000 UTC m=+839.053385400" Oct 01 15:18:49 crc kubenswrapper[4869]: I1001 15:18:49.926976 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=24.20379961 podStartE2EDuration="29.926945059s" podCreationTimestamp="2025-10-01 15:18:20 +0000 UTC" firstStartedPulling="2025-10-01 15:18:34.96078947 +0000 UTC m=+824.107632606" lastFinishedPulling="2025-10-01 15:18:40.683934939 +0000 UTC m=+829.830778055" observedRunningTime="2025-10-01 15:18:49.920588969 +0000 UTC m=+839.067432085" watchObservedRunningTime="2025-10-01 15:18:49.926945059 +0000 UTC m=+839.073788225" Oct 01 15:18:51 crc kubenswrapper[4869]: I1001 15:18:51.017185 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0" Oct 01 15:18:51 crc kubenswrapper[4869]: I1001 15:18:51.594686 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="99606142-049c-4f0a-813f-b7274041ec9e" path="/var/lib/kubelet/pods/99606142-049c-4f0a-813f-b7274041ec9e/volumes" Oct 01 15:18:51 crc kubenswrapper[4869]: I1001 15:18:51.700679 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0" Oct 01 15:18:51 crc kubenswrapper[4869]: I1001 15:18:51.700727 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0" Oct 01 15:18:51 crc kubenswrapper[4869]: I1001 15:18:51.796374 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0" Oct 01 15:18:52 crc kubenswrapper[4869]: I1001 
15:18:52.016194 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0" Oct 01 15:18:52 crc kubenswrapper[4869]: I1001 15:18:52.044879 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0" Oct 01 15:18:52 crc kubenswrapper[4869]: I1001 15:18:52.045418 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0" Oct 01 15:18:52 crc kubenswrapper[4869]: I1001 15:18:52.064664 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0" Oct 01 15:18:52 crc kubenswrapper[4869]: E1001 15:18:52.269313 4869 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.30:38966->38.102.83.30:35525: write tcp 38.102.83.30:38966->38.102.83.30:35525: write: broken pipe Oct 01 15:18:52 crc kubenswrapper[4869]: I1001 15:18:52.796497 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0" Oct 01 15:18:52 crc kubenswrapper[4869]: I1001 15:18:52.839782 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0" Oct 01 15:18:52 crc kubenswrapper[4869]: I1001 15:18:52.907331 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0" Oct 01 15:18:52 crc kubenswrapper[4869]: I1001 15:18:52.907387 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0" Oct 01 15:18:53 crc kubenswrapper[4869]: I1001 15:18:53.188617 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-77cd7bb9bf-mw84t"] Oct 01 15:18:53 crc kubenswrapper[4869]: E1001 15:18:53.188919 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="99606142-049c-4f0a-813f-b7274041ec9e" containerName="dnsmasq-dns" Oct 01 15:18:53 crc kubenswrapper[4869]: I1001 15:18:53.188930 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="99606142-049c-4f0a-813f-b7274041ec9e" containerName="dnsmasq-dns" Oct 01 15:18:53 crc kubenswrapper[4869]: E1001 15:18:53.188942 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="99606142-049c-4f0a-813f-b7274041ec9e" containerName="init" Oct 01 15:18:53 crc kubenswrapper[4869]: I1001 15:18:53.188947 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="99606142-049c-4f0a-813f-b7274041ec9e" containerName="init" Oct 01 15:18:53 crc kubenswrapper[4869]: I1001 15:18:53.189111 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="99606142-049c-4f0a-813f-b7274041ec9e" containerName="dnsmasq-dns" Oct 01 15:18:53 crc kubenswrapper[4869]: I1001 15:18:53.189849 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-77cd7bb9bf-mw84t" Oct 01 15:18:53 crc kubenswrapper[4869]: I1001 15:18:53.191423 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb" Oct 01 15:18:53 crc kubenswrapper[4869]: I1001 15:18:53.202945 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-77cd7bb9bf-mw84t"] Oct 01 15:18:53 crc kubenswrapper[4869]: I1001 15:18:53.320428 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dbaaa5f3-5e1c-48c3-a7fb-fe392bf03ddf-config\") pod \"dnsmasq-dns-77cd7bb9bf-mw84t\" (UID: \"dbaaa5f3-5e1c-48c3-a7fb-fe392bf03ddf\") " pod="openstack/dnsmasq-dns-77cd7bb9bf-mw84t" Oct 01 15:18:53 crc kubenswrapper[4869]: I1001 15:18:53.320482 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/dbaaa5f3-5e1c-48c3-a7fb-fe392bf03ddf-dns-svc\") pod \"dnsmasq-dns-77cd7bb9bf-mw84t\" (UID: \"dbaaa5f3-5e1c-48c3-a7fb-fe392bf03ddf\") " pod="openstack/dnsmasq-dns-77cd7bb9bf-mw84t" Oct 01 15:18:53 crc kubenswrapper[4869]: I1001 15:18:53.320505 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kkp49\" (UniqueName: \"kubernetes.io/projected/dbaaa5f3-5e1c-48c3-a7fb-fe392bf03ddf-kube-api-access-kkp49\") pod \"dnsmasq-dns-77cd7bb9bf-mw84t\" (UID: \"dbaaa5f3-5e1c-48c3-a7fb-fe392bf03ddf\") " pod="openstack/dnsmasq-dns-77cd7bb9bf-mw84t" Oct 01 15:18:53 crc kubenswrapper[4869]: I1001 15:18:53.320787 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/dbaaa5f3-5e1c-48c3-a7fb-fe392bf03ddf-ovsdbserver-sb\") pod \"dnsmasq-dns-77cd7bb9bf-mw84t\" (UID: \"dbaaa5f3-5e1c-48c3-a7fb-fe392bf03ddf\") " pod="openstack/dnsmasq-dns-77cd7bb9bf-mw84t" Oct 01 15:18:53 crc kubenswrapper[4869]: I1001 15:18:53.396973 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-rn9pl"] Oct 01 15:18:53 crc kubenswrapper[4869]: I1001 15:18:53.398080 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-metrics-rn9pl" Oct 01 15:18:53 crc kubenswrapper[4869]: I1001 15:18:53.404590 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-rn9pl"] Oct 01 15:18:53 crc kubenswrapper[4869]: I1001 15:18:53.405701 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config" Oct 01 15:18:53 crc kubenswrapper[4869]: I1001 15:18:53.422293 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dbaaa5f3-5e1c-48c3-a7fb-fe392bf03ddf-config\") pod \"dnsmasq-dns-77cd7bb9bf-mw84t\" (UID: \"dbaaa5f3-5e1c-48c3-a7fb-fe392bf03ddf\") " pod="openstack/dnsmasq-dns-77cd7bb9bf-mw84t" Oct 01 15:18:53 crc kubenswrapper[4869]: I1001 15:18:53.422366 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/dbaaa5f3-5e1c-48c3-a7fb-fe392bf03ddf-dns-svc\") pod \"dnsmasq-dns-77cd7bb9bf-mw84t\" (UID: \"dbaaa5f3-5e1c-48c3-a7fb-fe392bf03ddf\") " pod="openstack/dnsmasq-dns-77cd7bb9bf-mw84t" Oct 01 15:18:53 crc kubenswrapper[4869]: I1001 15:18:53.422405 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kkp49\" (UniqueName: \"kubernetes.io/projected/dbaaa5f3-5e1c-48c3-a7fb-fe392bf03ddf-kube-api-access-kkp49\") pod \"dnsmasq-dns-77cd7bb9bf-mw84t\" (UID: \"dbaaa5f3-5e1c-48c3-a7fb-fe392bf03ddf\") " pod="openstack/dnsmasq-dns-77cd7bb9bf-mw84t" Oct 01 15:18:53 crc kubenswrapper[4869]: I1001 15:18:53.422450 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/dbaaa5f3-5e1c-48c3-a7fb-fe392bf03ddf-ovsdbserver-sb\") pod \"dnsmasq-dns-77cd7bb9bf-mw84t\" (UID: \"dbaaa5f3-5e1c-48c3-a7fb-fe392bf03ddf\") " pod="openstack/dnsmasq-dns-77cd7bb9bf-mw84t" Oct 01 15:18:53 crc kubenswrapper[4869]: I1001 15:18:53.423182 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dbaaa5f3-5e1c-48c3-a7fb-fe392bf03ddf-config\") pod \"dnsmasq-dns-77cd7bb9bf-mw84t\" (UID: \"dbaaa5f3-5e1c-48c3-a7fb-fe392bf03ddf\") " pod="openstack/dnsmasq-dns-77cd7bb9bf-mw84t" Oct 01 15:18:53 crc kubenswrapper[4869]: I1001 15:18:53.423241 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/dbaaa5f3-5e1c-48c3-a7fb-fe392bf03ddf-ovsdbserver-sb\") pod \"dnsmasq-dns-77cd7bb9bf-mw84t\" (UID: \"dbaaa5f3-5e1c-48c3-a7fb-fe392bf03ddf\") " pod="openstack/dnsmasq-dns-77cd7bb9bf-mw84t" Oct 01 15:18:53 crc kubenswrapper[4869]: I1001 15:18:53.423489 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/dbaaa5f3-5e1c-48c3-a7fb-fe392bf03ddf-dns-svc\") pod \"dnsmasq-dns-77cd7bb9bf-mw84t\" (UID: \"dbaaa5f3-5e1c-48c3-a7fb-fe392bf03ddf\") " pod="openstack/dnsmasq-dns-77cd7bb9bf-mw84t" Oct 01 15:18:53 crc kubenswrapper[4869]: I1001 15:18:53.446847 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kkp49\" (UniqueName: \"kubernetes.io/projected/dbaaa5f3-5e1c-48c3-a7fb-fe392bf03ddf-kube-api-access-kkp49\") pod \"dnsmasq-dns-77cd7bb9bf-mw84t\" (UID: \"dbaaa5f3-5e1c-48c3-a7fb-fe392bf03ddf\") " pod="openstack/dnsmasq-dns-77cd7bb9bf-mw84t" Oct 01 15:18:53 crc kubenswrapper[4869]: I1001 15:18:53.497696 4869 kubelet.go:2437] "SyncLoop 
DELETE" source="api" pods=["openstack/dnsmasq-dns-77cd7bb9bf-mw84t"] Oct 01 15:18:53 crc kubenswrapper[4869]: I1001 15:18:53.498355 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-77cd7bb9bf-mw84t" Oct 01 15:18:53 crc kubenswrapper[4869]: I1001 15:18:53.524326 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2d3dbd60-2b6c-431d-a4ce-3d8d606ee5e9-config\") pod \"ovn-controller-metrics-rn9pl\" (UID: \"2d3dbd60-2b6c-431d-a4ce-3d8d606ee5e9\") " pod="openstack/ovn-controller-metrics-rn9pl" Oct 01 15:18:53 crc kubenswrapper[4869]: I1001 15:18:53.524386 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/2d3dbd60-2b6c-431d-a4ce-3d8d606ee5e9-ovs-rundir\") pod \"ovn-controller-metrics-rn9pl\" (UID: \"2d3dbd60-2b6c-431d-a4ce-3d8d606ee5e9\") " pod="openstack/ovn-controller-metrics-rn9pl" Oct 01 15:18:53 crc kubenswrapper[4869]: I1001 15:18:53.524409 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d3dbd60-2b6c-431d-a4ce-3d8d606ee5e9-combined-ca-bundle\") pod \"ovn-controller-metrics-rn9pl\" (UID: \"2d3dbd60-2b6c-431d-a4ce-3d8d606ee5e9\") " pod="openstack/ovn-controller-metrics-rn9pl" Oct 01 15:18:53 crc kubenswrapper[4869]: I1001 15:18:53.524439 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/2d3dbd60-2b6c-431d-a4ce-3d8d606ee5e9-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-rn9pl\" (UID: \"2d3dbd60-2b6c-431d-a4ce-3d8d606ee5e9\") " pod="openstack/ovn-controller-metrics-rn9pl" Oct 01 15:18:53 crc kubenswrapper[4869]: I1001 15:18:53.524472 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d54x8\" (UniqueName: \"kubernetes.io/projected/2d3dbd60-2b6c-431d-a4ce-3d8d606ee5e9-kube-api-access-d54x8\") pod \"ovn-controller-metrics-rn9pl\" (UID: \"2d3dbd60-2b6c-431d-a4ce-3d8d606ee5e9\") " pod="openstack/ovn-controller-metrics-rn9pl" Oct 01 15:18:53 crc kubenswrapper[4869]: I1001 15:18:53.524506 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/2d3dbd60-2b6c-431d-a4ce-3d8d606ee5e9-ovn-rundir\") pod \"ovn-controller-metrics-rn9pl\" (UID: \"2d3dbd60-2b6c-431d-a4ce-3d8d606ee5e9\") " pod="openstack/ovn-controller-metrics-rn9pl" Oct 01 15:18:53 crc kubenswrapper[4869]: I1001 15:18:53.528108 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-86ddb7fb65-5th7v"] Oct 01 15:18:53 crc kubenswrapper[4869]: I1001 15:18:53.529718 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-86ddb7fb65-5th7v" Oct 01 15:18:53 crc kubenswrapper[4869]: I1001 15:18:53.532431 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb" Oct 01 15:18:53 crc kubenswrapper[4869]: I1001 15:18:53.555719 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-86ddb7fb65-5th7v"] Oct 01 15:18:53 crc kubenswrapper[4869]: I1001 15:18:53.606057 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"] Oct 01 15:18:53 crc kubenswrapper[4869]: I1001 15:18:53.609752 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Oct 01 15:18:53 crc kubenswrapper[4869]: I1001 15:18:53.609860 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Oct 01 15:18:53 crc kubenswrapper[4869]: I1001 15:18:53.616001 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts" Oct 01 15:18:53 crc kubenswrapper[4869]: I1001 15:18:53.616167 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs" Oct 01 15:18:53 crc kubenswrapper[4869]: I1001 15:18:53.616294 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-srj59" Oct 01 15:18:53 crc kubenswrapper[4869]: I1001 15:18:53.616641 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config" Oct 01 15:18:53 crc kubenswrapper[4869]: I1001 15:18:53.626338 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4btt7\" (UniqueName: \"kubernetes.io/projected/298c162a-4d72-4d7c-bd59-d5d5c8f7cf7f-kube-api-access-4btt7\") pod \"dnsmasq-dns-86ddb7fb65-5th7v\" (UID: \"298c162a-4d72-4d7c-bd59-d5d5c8f7cf7f\") " pod="openstack/dnsmasq-dns-86ddb7fb65-5th7v" Oct 01 15:18:53 crc kubenswrapper[4869]: I1001 15:18:53.626383 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d54x8\" (UniqueName: \"kubernetes.io/projected/2d3dbd60-2b6c-431d-a4ce-3d8d606ee5e9-kube-api-access-d54x8\") pod \"ovn-controller-metrics-rn9pl\" (UID: \"2d3dbd60-2b6c-431d-a4ce-3d8d606ee5e9\") " pod="openstack/ovn-controller-metrics-rn9pl" Oct 01 15:18:53 crc kubenswrapper[4869]: I1001 15:18:53.626428 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/2d3dbd60-2b6c-431d-a4ce-3d8d606ee5e9-ovn-rundir\") pod \"ovn-controller-metrics-rn9pl\" (UID: \"2d3dbd60-2b6c-431d-a4ce-3d8d606ee5e9\") " pod="openstack/ovn-controller-metrics-rn9pl" Oct 01 15:18:53 crc kubenswrapper[4869]: I1001 15:18:53.626471 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/298c162a-4d72-4d7c-bd59-d5d5c8f7cf7f-ovsdbserver-sb\") pod \"dnsmasq-dns-86ddb7fb65-5th7v\" (UID: \"298c162a-4d72-4d7c-bd59-d5d5c8f7cf7f\") " pod="openstack/dnsmasq-dns-86ddb7fb65-5th7v" Oct 01 15:18:53 crc kubenswrapper[4869]: I1001 15:18:53.626513 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2d3dbd60-2b6c-431d-a4ce-3d8d606ee5e9-config\") pod \"ovn-controller-metrics-rn9pl\" (UID: \"2d3dbd60-2b6c-431d-a4ce-3d8d606ee5e9\") " pod="openstack/ovn-controller-metrics-rn9pl" Oct 01 15:18:53 crc 
kubenswrapper[4869]: I1001 15:18:53.626541 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/298c162a-4d72-4d7c-bd59-d5d5c8f7cf7f-config\") pod \"dnsmasq-dns-86ddb7fb65-5th7v\" (UID: \"298c162a-4d72-4d7c-bd59-d5d5c8f7cf7f\") " pod="openstack/dnsmasq-dns-86ddb7fb65-5th7v" Oct 01 15:18:53 crc kubenswrapper[4869]: I1001 15:18:53.626564 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/298c162a-4d72-4d7c-bd59-d5d5c8f7cf7f-ovsdbserver-nb\") pod \"dnsmasq-dns-86ddb7fb65-5th7v\" (UID: \"298c162a-4d72-4d7c-bd59-d5d5c8f7cf7f\") " pod="openstack/dnsmasq-dns-86ddb7fb65-5th7v" Oct 01 15:18:53 crc kubenswrapper[4869]: I1001 15:18:53.626591 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/2d3dbd60-2b6c-431d-a4ce-3d8d606ee5e9-ovs-rundir\") pod \"ovn-controller-metrics-rn9pl\" (UID: \"2d3dbd60-2b6c-431d-a4ce-3d8d606ee5e9\") " pod="openstack/ovn-controller-metrics-rn9pl" Oct 01 15:18:53 crc kubenswrapper[4869]: I1001 15:18:53.626610 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d3dbd60-2b6c-431d-a4ce-3d8d606ee5e9-combined-ca-bundle\") pod \"ovn-controller-metrics-rn9pl\" (UID: \"2d3dbd60-2b6c-431d-a4ce-3d8d606ee5e9\") " pod="openstack/ovn-controller-metrics-rn9pl" Oct 01 15:18:53 crc kubenswrapper[4869]: I1001 15:18:53.626638 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/2d3dbd60-2b6c-431d-a4ce-3d8d606ee5e9-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-rn9pl\" (UID: \"2d3dbd60-2b6c-431d-a4ce-3d8d606ee5e9\") " pod="openstack/ovn-controller-metrics-rn9pl" Oct 01 15:18:53 crc kubenswrapper[4869]: I1001 15:18:53.626662 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/298c162a-4d72-4d7c-bd59-d5d5c8f7cf7f-dns-svc\") pod \"dnsmasq-dns-86ddb7fb65-5th7v\" (UID: \"298c162a-4d72-4d7c-bd59-d5d5c8f7cf7f\") " pod="openstack/dnsmasq-dns-86ddb7fb65-5th7v" Oct 01 15:18:53 crc kubenswrapper[4869]: I1001 15:18:53.627144 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/2d3dbd60-2b6c-431d-a4ce-3d8d606ee5e9-ovn-rundir\") pod \"ovn-controller-metrics-rn9pl\" (UID: \"2d3dbd60-2b6c-431d-a4ce-3d8d606ee5e9\") " pod="openstack/ovn-controller-metrics-rn9pl" Oct 01 15:18:53 crc kubenswrapper[4869]: I1001 15:18:53.627688 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2d3dbd60-2b6c-431d-a4ce-3d8d606ee5e9-config\") pod \"ovn-controller-metrics-rn9pl\" (UID: \"2d3dbd60-2b6c-431d-a4ce-3d8d606ee5e9\") " pod="openstack/ovn-controller-metrics-rn9pl" Oct 01 15:18:53 crc kubenswrapper[4869]: I1001 15:18:53.627743 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/2d3dbd60-2b6c-431d-a4ce-3d8d606ee5e9-ovs-rundir\") pod \"ovn-controller-metrics-rn9pl\" (UID: \"2d3dbd60-2b6c-431d-a4ce-3d8d606ee5e9\") " pod="openstack/ovn-controller-metrics-rn9pl" Oct 01 15:18:53 crc kubenswrapper[4869]: I1001 15:18:53.645708 4869 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d3dbd60-2b6c-431d-a4ce-3d8d606ee5e9-combined-ca-bundle\") pod \"ovn-controller-metrics-rn9pl\" (UID: \"2d3dbd60-2b6c-431d-a4ce-3d8d606ee5e9\") " pod="openstack/ovn-controller-metrics-rn9pl" Oct 01 15:18:53 crc kubenswrapper[4869]: I1001 15:18:53.649989 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/2d3dbd60-2b6c-431d-a4ce-3d8d606ee5e9-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-rn9pl\" (UID: \"2d3dbd60-2b6c-431d-a4ce-3d8d606ee5e9\") " pod="openstack/ovn-controller-metrics-rn9pl" Oct 01 15:18:53 crc kubenswrapper[4869]: I1001 15:18:53.653494 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d54x8\" (UniqueName: \"kubernetes.io/projected/2d3dbd60-2b6c-431d-a4ce-3d8d606ee5e9-kube-api-access-d54x8\") pod \"ovn-controller-metrics-rn9pl\" (UID: \"2d3dbd60-2b6c-431d-a4ce-3d8d606ee5e9\") " pod="openstack/ovn-controller-metrics-rn9pl" Oct 01 15:18:53 crc kubenswrapper[4869]: I1001 15:18:53.713961 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-rn9pl" Oct 01 15:18:53 crc kubenswrapper[4869]: I1001 15:18:53.728760 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9941e8c7-ec7d-4385-bae8-fd5fc9250689-config\") pod \"ovn-northd-0\" (UID: \"9941e8c7-ec7d-4385-bae8-fd5fc9250689\") " pod="openstack/ovn-northd-0" Oct 01 15:18:53 crc kubenswrapper[4869]: I1001 15:18:53.728847 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9941e8c7-ec7d-4385-bae8-fd5fc9250689-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"9941e8c7-ec7d-4385-bae8-fd5fc9250689\") " pod="openstack/ovn-northd-0" Oct 01 15:18:53 crc kubenswrapper[4869]: I1001 15:18:53.728903 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/298c162a-4d72-4d7c-bd59-d5d5c8f7cf7f-ovsdbserver-sb\") pod \"dnsmasq-dns-86ddb7fb65-5th7v\" (UID: \"298c162a-4d72-4d7c-bd59-d5d5c8f7cf7f\") " pod="openstack/dnsmasq-dns-86ddb7fb65-5th7v" Oct 01 15:18:53 crc kubenswrapper[4869]: I1001 15:18:53.728927 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/9941e8c7-ec7d-4385-bae8-fd5fc9250689-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"9941e8c7-ec7d-4385-bae8-fd5fc9250689\") " pod="openstack/ovn-northd-0" Oct 01 15:18:53 crc kubenswrapper[4869]: I1001 15:18:53.729799 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/298c162a-4d72-4d7c-bd59-d5d5c8f7cf7f-ovsdbserver-sb\") pod \"dnsmasq-dns-86ddb7fb65-5th7v\" (UID: \"298c162a-4d72-4d7c-bd59-d5d5c8f7cf7f\") " pod="openstack/dnsmasq-dns-86ddb7fb65-5th7v" Oct 01 15:18:53 crc kubenswrapper[4869]: I1001 15:18:53.729876 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/298c162a-4d72-4d7c-bd59-d5d5c8f7cf7f-config\") pod \"dnsmasq-dns-86ddb7fb65-5th7v\" (UID: \"298c162a-4d72-4d7c-bd59-d5d5c8f7cf7f\") " 
pod="openstack/dnsmasq-dns-86ddb7fb65-5th7v" Oct 01 15:18:53 crc kubenswrapper[4869]: I1001 15:18:53.729910 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9941e8c7-ec7d-4385-bae8-fd5fc9250689-scripts\") pod \"ovn-northd-0\" (UID: \"9941e8c7-ec7d-4385-bae8-fd5fc9250689\") " pod="openstack/ovn-northd-0" Oct 01 15:18:53 crc kubenswrapper[4869]: I1001 15:18:53.729935 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/298c162a-4d72-4d7c-bd59-d5d5c8f7cf7f-ovsdbserver-nb\") pod \"dnsmasq-dns-86ddb7fb65-5th7v\" (UID: \"298c162a-4d72-4d7c-bd59-d5d5c8f7cf7f\") " pod="openstack/dnsmasq-dns-86ddb7fb65-5th7v" Oct 01 15:18:53 crc kubenswrapper[4869]: I1001 15:18:53.730480 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/298c162a-4d72-4d7c-bd59-d5d5c8f7cf7f-config\") pod \"dnsmasq-dns-86ddb7fb65-5th7v\" (UID: \"298c162a-4d72-4d7c-bd59-d5d5c8f7cf7f\") " pod="openstack/dnsmasq-dns-86ddb7fb65-5th7v" Oct 01 15:18:53 crc kubenswrapper[4869]: I1001 15:18:53.730642 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/9941e8c7-ec7d-4385-bae8-fd5fc9250689-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"9941e8c7-ec7d-4385-bae8-fd5fc9250689\") " pod="openstack/ovn-northd-0" Oct 01 15:18:53 crc kubenswrapper[4869]: I1001 15:18:53.730678 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/9941e8c7-ec7d-4385-bae8-fd5fc9250689-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"9941e8c7-ec7d-4385-bae8-fd5fc9250689\") " pod="openstack/ovn-northd-0" Oct 01 15:18:53 crc kubenswrapper[4869]: I1001 15:18:53.730704 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/298c162a-4d72-4d7c-bd59-d5d5c8f7cf7f-dns-svc\") pod \"dnsmasq-dns-86ddb7fb65-5th7v\" (UID: \"298c162a-4d72-4d7c-bd59-d5d5c8f7cf7f\") " pod="openstack/dnsmasq-dns-86ddb7fb65-5th7v" Oct 01 15:18:53 crc kubenswrapper[4869]: I1001 15:18:53.730730 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4btt7\" (UniqueName: \"kubernetes.io/projected/298c162a-4d72-4d7c-bd59-d5d5c8f7cf7f-kube-api-access-4btt7\") pod \"dnsmasq-dns-86ddb7fb65-5th7v\" (UID: \"298c162a-4d72-4d7c-bd59-d5d5c8f7cf7f\") " pod="openstack/dnsmasq-dns-86ddb7fb65-5th7v" Oct 01 15:18:53 crc kubenswrapper[4869]: I1001 15:18:53.730757 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sw8jg\" (UniqueName: \"kubernetes.io/projected/9941e8c7-ec7d-4385-bae8-fd5fc9250689-kube-api-access-sw8jg\") pod \"ovn-northd-0\" (UID: \"9941e8c7-ec7d-4385-bae8-fd5fc9250689\") " pod="openstack/ovn-northd-0" Oct 01 15:18:53 crc kubenswrapper[4869]: I1001 15:18:53.731798 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/298c162a-4d72-4d7c-bd59-d5d5c8f7cf7f-dns-svc\") pod \"dnsmasq-dns-86ddb7fb65-5th7v\" (UID: \"298c162a-4d72-4d7c-bd59-d5d5c8f7cf7f\") " pod="openstack/dnsmasq-dns-86ddb7fb65-5th7v" Oct 01 15:18:53 crc kubenswrapper[4869]: I1001 15:18:53.732466 4869 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/298c162a-4d72-4d7c-bd59-d5d5c8f7cf7f-ovsdbserver-nb\") pod \"dnsmasq-dns-86ddb7fb65-5th7v\" (UID: \"298c162a-4d72-4d7c-bd59-d5d5c8f7cf7f\") " pod="openstack/dnsmasq-dns-86ddb7fb65-5th7v" Oct 01 15:18:53 crc kubenswrapper[4869]: I1001 15:18:53.748441 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4btt7\" (UniqueName: \"kubernetes.io/projected/298c162a-4d72-4d7c-bd59-d5d5c8f7cf7f-kube-api-access-4btt7\") pod \"dnsmasq-dns-86ddb7fb65-5th7v\" (UID: \"298c162a-4d72-4d7c-bd59-d5d5c8f7cf7f\") " pod="openstack/dnsmasq-dns-86ddb7fb65-5th7v" Oct 01 15:18:53 crc kubenswrapper[4869]: I1001 15:18:53.803499 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-77cd7bb9bf-mw84t"] Oct 01 15:18:53 crc kubenswrapper[4869]: I1001 15:18:53.805537 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0" Oct 01 15:18:53 crc kubenswrapper[4869]: I1001 15:18:53.831839 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/9941e8c7-ec7d-4385-bae8-fd5fc9250689-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"9941e8c7-ec7d-4385-bae8-fd5fc9250689\") " pod="openstack/ovn-northd-0" Oct 01 15:18:53 crc kubenswrapper[4869]: I1001 15:18:53.831886 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9941e8c7-ec7d-4385-bae8-fd5fc9250689-scripts\") pod \"ovn-northd-0\" (UID: \"9941e8c7-ec7d-4385-bae8-fd5fc9250689\") " pod="openstack/ovn-northd-0" Oct 01 15:18:53 crc kubenswrapper[4869]: I1001 15:18:53.831923 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/9941e8c7-ec7d-4385-bae8-fd5fc9250689-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"9941e8c7-ec7d-4385-bae8-fd5fc9250689\") " pod="openstack/ovn-northd-0" Oct 01 15:18:53 crc kubenswrapper[4869]: I1001 15:18:53.831949 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/9941e8c7-ec7d-4385-bae8-fd5fc9250689-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"9941e8c7-ec7d-4385-bae8-fd5fc9250689\") " pod="openstack/ovn-northd-0" Oct 01 15:18:53 crc kubenswrapper[4869]: I1001 15:18:53.831987 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sw8jg\" (UniqueName: \"kubernetes.io/projected/9941e8c7-ec7d-4385-bae8-fd5fc9250689-kube-api-access-sw8jg\") pod \"ovn-northd-0\" (UID: \"9941e8c7-ec7d-4385-bae8-fd5fc9250689\") " pod="openstack/ovn-northd-0" Oct 01 15:18:53 crc kubenswrapper[4869]: I1001 15:18:53.832017 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9941e8c7-ec7d-4385-bae8-fd5fc9250689-config\") pod \"ovn-northd-0\" (UID: \"9941e8c7-ec7d-4385-bae8-fd5fc9250689\") " pod="openstack/ovn-northd-0" Oct 01 15:18:53 crc kubenswrapper[4869]: I1001 15:18:53.832044 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9941e8c7-ec7d-4385-bae8-fd5fc9250689-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"9941e8c7-ec7d-4385-bae8-fd5fc9250689\") " pod="openstack/ovn-northd-0" Oct 01 15:18:53 crc 
kubenswrapper[4869]: I1001 15:18:53.832782 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/9941e8c7-ec7d-4385-bae8-fd5fc9250689-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"9941e8c7-ec7d-4385-bae8-fd5fc9250689\") " pod="openstack/ovn-northd-0" Oct 01 15:18:53 crc kubenswrapper[4869]: I1001 15:18:53.833426 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9941e8c7-ec7d-4385-bae8-fd5fc9250689-scripts\") pod \"ovn-northd-0\" (UID: \"9941e8c7-ec7d-4385-bae8-fd5fc9250689\") " pod="openstack/ovn-northd-0" Oct 01 15:18:53 crc kubenswrapper[4869]: I1001 15:18:53.833620 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9941e8c7-ec7d-4385-bae8-fd5fc9250689-config\") pod \"ovn-northd-0\" (UID: \"9941e8c7-ec7d-4385-bae8-fd5fc9250689\") " pod="openstack/ovn-northd-0" Oct 01 15:18:53 crc kubenswrapper[4869]: I1001 15:18:53.835364 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/9941e8c7-ec7d-4385-bae8-fd5fc9250689-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"9941e8c7-ec7d-4385-bae8-fd5fc9250689\") " pod="openstack/ovn-northd-0" Oct 01 15:18:53 crc kubenswrapper[4869]: I1001 15:18:53.837177 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/9941e8c7-ec7d-4385-bae8-fd5fc9250689-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"9941e8c7-ec7d-4385-bae8-fd5fc9250689\") " pod="openstack/ovn-northd-0" Oct 01 15:18:53 crc kubenswrapper[4869]: I1001 15:18:53.838973 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9941e8c7-ec7d-4385-bae8-fd5fc9250689-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"9941e8c7-ec7d-4385-bae8-fd5fc9250689\") " pod="openstack/ovn-northd-0" Oct 01 15:18:53 crc kubenswrapper[4869]: I1001 15:18:53.848679 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sw8jg\" (UniqueName: \"kubernetes.io/projected/9941e8c7-ec7d-4385-bae8-fd5fc9250689-kube-api-access-sw8jg\") pod \"ovn-northd-0\" (UID: \"9941e8c7-ec7d-4385-bae8-fd5fc9250689\") " pod="openstack/ovn-northd-0" Oct 01 15:18:53 crc kubenswrapper[4869]: I1001 15:18:53.860624 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0" Oct 01 15:18:53 crc kubenswrapper[4869]: I1001 15:18:53.866720 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77cd7bb9bf-mw84t" event={"ID":"dbaaa5f3-5e1c-48c3-a7fb-fe392bf03ddf","Type":"ContainerStarted","Data":"a7ca33d6dccd4b5b5f0b5dba2c87bb3a1d5aac2905502ce23e1c38ade251aeea"} Oct 01 15:18:53 crc kubenswrapper[4869]: I1001 15:18:53.893569 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86ddb7fb65-5th7v" Oct 01 15:18:53 crc kubenswrapper[4869]: I1001 15:18:53.922917 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Oct 01 15:18:53 crc kubenswrapper[4869]: I1001 15:18:53.935453 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-northd-0" Oct 01 15:18:54 crc kubenswrapper[4869]: I1001 15:18:54.142371 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-rn9pl"] Oct 01 15:18:54 crc kubenswrapper[4869]: W1001 15:18:54.153228 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2d3dbd60_2b6c_431d_a4ce_3d8d606ee5e9.slice/crio-226bf95078f2a86c16b9ac7ddf7c443b3012eec856f628c40c06c6854305f0fc WatchSource:0}: Error finding container 226bf95078f2a86c16b9ac7ddf7c443b3012eec856f628c40c06c6854305f0fc: Status 404 returned error can't find the container with id 226bf95078f2a86c16b9ac7ddf7c443b3012eec856f628c40c06c6854305f0fc Oct 01 15:18:54 crc kubenswrapper[4869]: I1001 15:18:54.345969 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-86ddb7fb65-5th7v"] Oct 01 15:18:54 crc kubenswrapper[4869]: W1001 15:18:54.352021 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod298c162a_4d72_4d7c_bd59_d5d5c8f7cf7f.slice/crio-19f7ed2cee0dadca13cdf8d3341006d2c83f2ca0b62746c72ac27d9853441a7d WatchSource:0}: Error finding container 19f7ed2cee0dadca13cdf8d3341006d2c83f2ca0b62746c72ac27d9853441a7d: Status 404 returned error can't find the container with id 19f7ed2cee0dadca13cdf8d3341006d2c83f2ca0b62746c72ac27d9853441a7d Oct 01 15:18:54 crc kubenswrapper[4869]: I1001 15:18:54.414298 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Oct 01 15:18:54 crc kubenswrapper[4869]: I1001 15:18:54.876181 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-rn9pl" event={"ID":"2d3dbd60-2b6c-431d-a4ce-3d8d606ee5e9","Type":"ContainerStarted","Data":"125df08b23efbaf586c04b5b7aaceaaa8551fdae5debb5eecdb81cec4ef015b9"} Oct 01 15:18:54 crc kubenswrapper[4869]: I1001 15:18:54.876561 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-rn9pl" event={"ID":"2d3dbd60-2b6c-431d-a4ce-3d8d606ee5e9","Type":"ContainerStarted","Data":"226bf95078f2a86c16b9ac7ddf7c443b3012eec856f628c40c06c6854305f0fc"} Oct 01 15:18:54 crc kubenswrapper[4869]: I1001 15:18:54.878855 4869 generic.go:334] "Generic (PLEG): container finished" podID="298c162a-4d72-4d7c-bd59-d5d5c8f7cf7f" containerID="c9112b6c56879e32b798287e159dea2fc67f0ade0ed0e14364c2cbacb8a4db68" exitCode=0 Oct 01 15:18:54 crc kubenswrapper[4869]: I1001 15:18:54.879091 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86ddb7fb65-5th7v" event={"ID":"298c162a-4d72-4d7c-bd59-d5d5c8f7cf7f","Type":"ContainerDied","Data":"c9112b6c56879e32b798287e159dea2fc67f0ade0ed0e14364c2cbacb8a4db68"} Oct 01 15:18:54 crc kubenswrapper[4869]: I1001 15:18:54.879125 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86ddb7fb65-5th7v" event={"ID":"298c162a-4d72-4d7c-bd59-d5d5c8f7cf7f","Type":"ContainerStarted","Data":"19f7ed2cee0dadca13cdf8d3341006d2c83f2ca0b62746c72ac27d9853441a7d"} Oct 01 15:18:54 crc kubenswrapper[4869]: I1001 15:18:54.881641 4869 generic.go:334] "Generic (PLEG): container finished" podID="dbaaa5f3-5e1c-48c3-a7fb-fe392bf03ddf" containerID="30d057bbabf1d0e38db33862ec0f9958cf24c6ba02c3d08d589103dc4e4d69aa" exitCode=0 Oct 01 15:18:54 crc kubenswrapper[4869]: I1001 15:18:54.881951 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77cd7bb9bf-mw84t" 
event={"ID":"dbaaa5f3-5e1c-48c3-a7fb-fe392bf03ddf","Type":"ContainerDied","Data":"30d057bbabf1d0e38db33862ec0f9958cf24c6ba02c3d08d589103dc4e4d69aa"} Oct 01 15:18:54 crc kubenswrapper[4869]: I1001 15:18:54.884559 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"9941e8c7-ec7d-4385-bae8-fd5fc9250689","Type":"ContainerStarted","Data":"d6bc183ce400e86ba2cf8f85289ae674ee93df6d636f287da32c5a3e63804b3b"} Oct 01 15:18:54 crc kubenswrapper[4869]: I1001 15:18:54.911651 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-rn9pl" podStartSLOduration=1.911630717 podStartE2EDuration="1.911630717s" podCreationTimestamp="2025-10-01 15:18:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:18:54.907369419 +0000 UTC m=+844.054212575" watchObservedRunningTime="2025-10-01 15:18:54.911630717 +0000 UTC m=+844.058473833" Oct 01 15:18:55 crc kubenswrapper[4869]: I1001 15:18:55.199780 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-77cd7bb9bf-mw84t" Oct 01 15:18:55 crc kubenswrapper[4869]: I1001 15:18:55.257536 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kkp49\" (UniqueName: \"kubernetes.io/projected/dbaaa5f3-5e1c-48c3-a7fb-fe392bf03ddf-kube-api-access-kkp49\") pod \"dbaaa5f3-5e1c-48c3-a7fb-fe392bf03ddf\" (UID: \"dbaaa5f3-5e1c-48c3-a7fb-fe392bf03ddf\") " Oct 01 15:18:55 crc kubenswrapper[4869]: I1001 15:18:55.257654 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/dbaaa5f3-5e1c-48c3-a7fb-fe392bf03ddf-dns-svc\") pod \"dbaaa5f3-5e1c-48c3-a7fb-fe392bf03ddf\" (UID: \"dbaaa5f3-5e1c-48c3-a7fb-fe392bf03ddf\") " Oct 01 15:18:55 crc kubenswrapper[4869]: I1001 15:18:55.257724 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/dbaaa5f3-5e1c-48c3-a7fb-fe392bf03ddf-ovsdbserver-sb\") pod \"dbaaa5f3-5e1c-48c3-a7fb-fe392bf03ddf\" (UID: \"dbaaa5f3-5e1c-48c3-a7fb-fe392bf03ddf\") " Oct 01 15:18:55 crc kubenswrapper[4869]: I1001 15:18:55.257747 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dbaaa5f3-5e1c-48c3-a7fb-fe392bf03ddf-config\") pod \"dbaaa5f3-5e1c-48c3-a7fb-fe392bf03ddf\" (UID: \"dbaaa5f3-5e1c-48c3-a7fb-fe392bf03ddf\") " Oct 01 15:18:55 crc kubenswrapper[4869]: I1001 15:18:55.261911 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dbaaa5f3-5e1c-48c3-a7fb-fe392bf03ddf-kube-api-access-kkp49" (OuterVolumeSpecName: "kube-api-access-kkp49") pod "dbaaa5f3-5e1c-48c3-a7fb-fe392bf03ddf" (UID: "dbaaa5f3-5e1c-48c3-a7fb-fe392bf03ddf"). InnerVolumeSpecName "kube-api-access-kkp49". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:18:55 crc kubenswrapper[4869]: I1001 15:18:55.276217 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dbaaa5f3-5e1c-48c3-a7fb-fe392bf03ddf-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "dbaaa5f3-5e1c-48c3-a7fb-fe392bf03ddf" (UID: "dbaaa5f3-5e1c-48c3-a7fb-fe392bf03ddf"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:18:55 crc kubenswrapper[4869]: I1001 15:18:55.277420 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dbaaa5f3-5e1c-48c3-a7fb-fe392bf03ddf-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "dbaaa5f3-5e1c-48c3-a7fb-fe392bf03ddf" (UID: "dbaaa5f3-5e1c-48c3-a7fb-fe392bf03ddf"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:18:55 crc kubenswrapper[4869]: I1001 15:18:55.280225 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dbaaa5f3-5e1c-48c3-a7fb-fe392bf03ddf-config" (OuterVolumeSpecName: "config") pod "dbaaa5f3-5e1c-48c3-a7fb-fe392bf03ddf" (UID: "dbaaa5f3-5e1c-48c3-a7fb-fe392bf03ddf"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:18:55 crc kubenswrapper[4869]: I1001 15:18:55.359811 4869 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/dbaaa5f3-5e1c-48c3-a7fb-fe392bf03ddf-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 01 15:18:55 crc kubenswrapper[4869]: I1001 15:18:55.359851 4869 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/dbaaa5f3-5e1c-48c3-a7fb-fe392bf03ddf-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 01 15:18:55 crc kubenswrapper[4869]: I1001 15:18:55.359866 4869 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dbaaa5f3-5e1c-48c3-a7fb-fe392bf03ddf-config\") on node \"crc\" DevicePath \"\"" Oct 01 15:18:55 crc kubenswrapper[4869]: I1001 15:18:55.359879 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kkp49\" (UniqueName: \"kubernetes.io/projected/dbaaa5f3-5e1c-48c3-a7fb-fe392bf03ddf-kube-api-access-kkp49\") on node \"crc\" DevicePath \"\"" Oct 01 15:18:55 crc kubenswrapper[4869]: I1001 15:18:55.899966 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86ddb7fb65-5th7v" event={"ID":"298c162a-4d72-4d7c-bd59-d5d5c8f7cf7f","Type":"ContainerStarted","Data":"261d9771c3e0c08c4b3e742d5b6028f0b71083327f06a52fbf4b771d3c082ac0"} Oct 01 15:18:55 crc kubenswrapper[4869]: I1001 15:18:55.900445 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-86ddb7fb65-5th7v" Oct 01 15:18:55 crc kubenswrapper[4869]: I1001 15:18:55.904718 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-77cd7bb9bf-mw84t" Oct 01 15:18:55 crc kubenswrapper[4869]: I1001 15:18:55.904796 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77cd7bb9bf-mw84t" event={"ID":"dbaaa5f3-5e1c-48c3-a7fb-fe392bf03ddf","Type":"ContainerDied","Data":"a7ca33d6dccd4b5b5f0b5dba2c87bb3a1d5aac2905502ce23e1c38ade251aeea"} Oct 01 15:18:55 crc kubenswrapper[4869]: I1001 15:18:55.904854 4869 scope.go:117] "RemoveContainer" containerID="30d057bbabf1d0e38db33862ec0f9958cf24c6ba02c3d08d589103dc4e4d69aa" Oct 01 15:18:55 crc kubenswrapper[4869]: I1001 15:18:55.910746 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"9941e8c7-ec7d-4385-bae8-fd5fc9250689","Type":"ContainerStarted","Data":"c200d8f20a88ce052a0e811cf6fffd709a7bbcfefd3291011b63e25549d9d926"} Oct 01 15:18:55 crc kubenswrapper[4869]: I1001 15:18:55.929069 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-86ddb7fb65-5th7v" podStartSLOduration=2.929045934 podStartE2EDuration="2.929045934s" podCreationTimestamp="2025-10-01 15:18:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:18:55.923948245 +0000 UTC m=+845.070791411" watchObservedRunningTime="2025-10-01 15:18:55.929045934 +0000 UTC m=+845.075889050" Oct 01 15:18:55 crc kubenswrapper[4869]: I1001 15:18:55.977057 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-77cd7bb9bf-mw84t"] Oct 01 15:18:55 crc kubenswrapper[4869]: I1001 15:18:55.982585 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-77cd7bb9bf-mw84t"] Oct 01 15:18:56 crc kubenswrapper[4869]: I1001 15:18:56.131649 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0" Oct 01 15:18:56 crc kubenswrapper[4869]: I1001 15:18:56.189845 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0" Oct 01 15:18:56 crc kubenswrapper[4869]: I1001 15:18:56.919418 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"9941e8c7-ec7d-4385-bae8-fd5fc9250689","Type":"ContainerStarted","Data":"01e0a4c49880342e1b2afd33ecc9c68f9e0c828216bdecbd0d4d55d47701f022"} Oct 01 15:18:56 crc kubenswrapper[4869]: I1001 15:18:56.921455 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0" Oct 01 15:18:56 crc kubenswrapper[4869]: I1001 15:18:56.946576 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=2.8639677 podStartE2EDuration="3.946543793s" podCreationTimestamp="2025-10-01 15:18:53 +0000 UTC" firstStartedPulling="2025-10-01 15:18:54.437764648 +0000 UTC m=+843.584607764" lastFinishedPulling="2025-10-01 15:18:55.520340741 +0000 UTC m=+844.667183857" observedRunningTime="2025-10-01 15:18:56.941629029 +0000 UTC m=+846.088472145" watchObservedRunningTime="2025-10-01 15:18:56.946543793 +0000 UTC m=+846.093386959" Oct 01 15:18:57 crc kubenswrapper[4869]: I1001 15:18:57.573316 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-q7p4t"] Oct 01 15:18:57 crc kubenswrapper[4869]: E1001 15:18:57.574077 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dbaaa5f3-5e1c-48c3-a7fb-fe392bf03ddf" containerName="init" Oct 01 15:18:57 crc 
kubenswrapper[4869]: I1001 15:18:57.574103 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="dbaaa5f3-5e1c-48c3-a7fb-fe392bf03ddf" containerName="init" Oct 01 15:18:57 crc kubenswrapper[4869]: I1001 15:18:57.574458 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="dbaaa5f3-5e1c-48c3-a7fb-fe392bf03ddf" containerName="init" Oct 01 15:18:57 crc kubenswrapper[4869]: I1001 15:18:57.575935 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-q7p4t" Oct 01 15:18:57 crc kubenswrapper[4869]: I1001 15:18:57.596713 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dbaaa5f3-5e1c-48c3-a7fb-fe392bf03ddf" path="/var/lib/kubelet/pods/dbaaa5f3-5e1c-48c3-a7fb-fe392bf03ddf/volumes" Oct 01 15:18:57 crc kubenswrapper[4869]: I1001 15:18:57.597181 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-q7p4t"] Oct 01 15:18:57 crc kubenswrapper[4869]: I1001 15:18:57.704405 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2ln6l\" (UniqueName: \"kubernetes.io/projected/01bcad38-d3c2-42c6-8895-366eb4e48bb0-kube-api-access-2ln6l\") pod \"glance-db-create-q7p4t\" (UID: \"01bcad38-d3c2-42c6-8895-366eb4e48bb0\") " pod="openstack/glance-db-create-q7p4t" Oct 01 15:18:57 crc kubenswrapper[4869]: I1001 15:18:57.807504 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2ln6l\" (UniqueName: \"kubernetes.io/projected/01bcad38-d3c2-42c6-8895-366eb4e48bb0-kube-api-access-2ln6l\") pod \"glance-db-create-q7p4t\" (UID: \"01bcad38-d3c2-42c6-8895-366eb4e48bb0\") " pod="openstack/glance-db-create-q7p4t" Oct 01 15:18:57 crc kubenswrapper[4869]: I1001 15:18:57.835297 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2ln6l\" (UniqueName: \"kubernetes.io/projected/01bcad38-d3c2-42c6-8895-366eb4e48bb0-kube-api-access-2ln6l\") pod \"glance-db-create-q7p4t\" (UID: \"01bcad38-d3c2-42c6-8895-366eb4e48bb0\") " pod="openstack/glance-db-create-q7p4t" Oct 01 15:18:57 crc kubenswrapper[4869]: I1001 15:18:57.908115 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-q7p4t" Oct 01 15:18:58 crc kubenswrapper[4869]: I1001 15:18:58.369625 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-q7p4t"] Oct 01 15:18:58 crc kubenswrapper[4869]: W1001 15:18:58.386949 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod01bcad38_d3c2_42c6_8895_366eb4e48bb0.slice/crio-030328938a9586b47e35073969d5701bdda2c2676d012ceb76297febd3f263b5 WatchSource:0}: Error finding container 030328938a9586b47e35073969d5701bdda2c2676d012ceb76297febd3f263b5: Status 404 returned error can't find the container with id 030328938a9586b47e35073969d5701bdda2c2676d012ceb76297febd3f263b5 Oct 01 15:18:58 crc kubenswrapper[4869]: I1001 15:18:58.941779 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-q7p4t" event={"ID":"01bcad38-d3c2-42c6-8895-366eb4e48bb0","Type":"ContainerDied","Data":"f472c64f2e196a6b51cdbf2b4f8b68c5419d7c0d5bc40bf0acff0ce4a2910019"} Oct 01 15:18:58 crc kubenswrapper[4869]: I1001 15:18:58.942528 4869 generic.go:334] "Generic (PLEG): container finished" podID="01bcad38-d3c2-42c6-8895-366eb4e48bb0" containerID="f472c64f2e196a6b51cdbf2b4f8b68c5419d7c0d5bc40bf0acff0ce4a2910019" exitCode=0 Oct 01 15:18:58 crc kubenswrapper[4869]: I1001 15:18:58.942647 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-q7p4t" event={"ID":"01bcad38-d3c2-42c6-8895-366eb4e48bb0","Type":"ContainerStarted","Data":"030328938a9586b47e35073969d5701bdda2c2676d012ceb76297febd3f263b5"} Oct 01 15:19:00 crc kubenswrapper[4869]: I1001 15:19:00.346702 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-q7p4t" Oct 01 15:19:00 crc kubenswrapper[4869]: I1001 15:19:00.462586 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2ln6l\" (UniqueName: \"kubernetes.io/projected/01bcad38-d3c2-42c6-8895-366eb4e48bb0-kube-api-access-2ln6l\") pod \"01bcad38-d3c2-42c6-8895-366eb4e48bb0\" (UID: \"01bcad38-d3c2-42c6-8895-366eb4e48bb0\") " Oct 01 15:19:00 crc kubenswrapper[4869]: I1001 15:19:00.472357 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01bcad38-d3c2-42c6-8895-366eb4e48bb0-kube-api-access-2ln6l" (OuterVolumeSpecName: "kube-api-access-2ln6l") pod "01bcad38-d3c2-42c6-8895-366eb4e48bb0" (UID: "01bcad38-d3c2-42c6-8895-366eb4e48bb0"). InnerVolumeSpecName "kube-api-access-2ln6l". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:19:00 crc kubenswrapper[4869]: I1001 15:19:00.566246 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2ln6l\" (UniqueName: \"kubernetes.io/projected/01bcad38-d3c2-42c6-8895-366eb4e48bb0-kube-api-access-2ln6l\") on node \"crc\" DevicePath \"\"" Oct 01 15:19:00 crc kubenswrapper[4869]: I1001 15:19:00.969908 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-q7p4t" event={"ID":"01bcad38-d3c2-42c6-8895-366eb4e48bb0","Type":"ContainerDied","Data":"030328938a9586b47e35073969d5701bdda2c2676d012ceb76297febd3f263b5"} Oct 01 15:19:00 crc kubenswrapper[4869]: I1001 15:19:00.969964 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="030328938a9586b47e35073969d5701bdda2c2676d012ceb76297febd3f263b5" Oct 01 15:19:00 crc kubenswrapper[4869]: I1001 15:19:00.970025 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-q7p4t" Oct 01 15:19:01 crc kubenswrapper[4869]: I1001 15:19:01.893111 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-mx78s"] Oct 01 15:19:01 crc kubenswrapper[4869]: E1001 15:19:01.893649 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="01bcad38-d3c2-42c6-8895-366eb4e48bb0" containerName="mariadb-database-create" Oct 01 15:19:01 crc kubenswrapper[4869]: I1001 15:19:01.893673 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="01bcad38-d3c2-42c6-8895-366eb4e48bb0" containerName="mariadb-database-create" Oct 01 15:19:01 crc kubenswrapper[4869]: I1001 15:19:01.893952 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="01bcad38-d3c2-42c6-8895-366eb4e48bb0" containerName="mariadb-database-create" Oct 01 15:19:01 crc kubenswrapper[4869]: I1001 15:19:01.894789 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-mx78s" Oct 01 15:19:01 crc kubenswrapper[4869]: I1001 15:19:01.908896 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-mx78s"] Oct 01 15:19:02 crc kubenswrapper[4869]: I1001 15:19:02.093299 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n7rpm\" (UniqueName: \"kubernetes.io/projected/746c5a71-9fcd-42a6-88f6-167edfe66fac-kube-api-access-n7rpm\") pod \"keystone-db-create-mx78s\" (UID: \"746c5a71-9fcd-42a6-88f6-167edfe66fac\") " pod="openstack/keystone-db-create-mx78s" Oct 01 15:19:02 crc kubenswrapper[4869]: I1001 15:19:02.146513 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-769h5"] Oct 01 15:19:02 crc kubenswrapper[4869]: I1001 15:19:02.148624 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-create-769h5" Oct 01 15:19:02 crc kubenswrapper[4869]: I1001 15:19:02.154502 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-769h5"] Oct 01 15:19:02 crc kubenswrapper[4869]: I1001 15:19:02.194938 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2g954\" (UniqueName: \"kubernetes.io/projected/6d3a2079-2713-4de2-b3c9-3e3e49d581e0-kube-api-access-2g954\") pod \"placement-db-create-769h5\" (UID: \"6d3a2079-2713-4de2-b3c9-3e3e49d581e0\") " pod="openstack/placement-db-create-769h5" Oct 01 15:19:02 crc kubenswrapper[4869]: I1001 15:19:02.195004 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n7rpm\" (UniqueName: \"kubernetes.io/projected/746c5a71-9fcd-42a6-88f6-167edfe66fac-kube-api-access-n7rpm\") pod \"keystone-db-create-mx78s\" (UID: \"746c5a71-9fcd-42a6-88f6-167edfe66fac\") " pod="openstack/keystone-db-create-mx78s" Oct 01 15:19:02 crc kubenswrapper[4869]: I1001 15:19:02.214057 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n7rpm\" (UniqueName: \"kubernetes.io/projected/746c5a71-9fcd-42a6-88f6-167edfe66fac-kube-api-access-n7rpm\") pod \"keystone-db-create-mx78s\" (UID: \"746c5a71-9fcd-42a6-88f6-167edfe66fac\") " pod="openstack/keystone-db-create-mx78s" Oct 01 15:19:02 crc kubenswrapper[4869]: I1001 15:19:02.231120 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-mx78s" Oct 01 15:19:02 crc kubenswrapper[4869]: I1001 15:19:02.297477 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2g954\" (UniqueName: \"kubernetes.io/projected/6d3a2079-2713-4de2-b3c9-3e3e49d581e0-kube-api-access-2g954\") pod \"placement-db-create-769h5\" (UID: \"6d3a2079-2713-4de2-b3c9-3e3e49d581e0\") " pod="openstack/placement-db-create-769h5" Oct 01 15:19:02 crc kubenswrapper[4869]: I1001 15:19:02.339653 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2g954\" (UniqueName: \"kubernetes.io/projected/6d3a2079-2713-4de2-b3c9-3e3e49d581e0-kube-api-access-2g954\") pod \"placement-db-create-769h5\" (UID: \"6d3a2079-2713-4de2-b3c9-3e3e49d581e0\") " pod="openstack/placement-db-create-769h5" Oct 01 15:19:02 crc kubenswrapper[4869]: I1001 15:19:02.465485 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-mx78s"] Oct 01 15:19:02 crc kubenswrapper[4869]: I1001 15:19:02.467644 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-create-769h5" Oct 01 15:19:02 crc kubenswrapper[4869]: W1001 15:19:02.477997 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod746c5a71_9fcd_42a6_88f6_167edfe66fac.slice/crio-b3ec7cc4b702b0c73048be046ac73937ab3773511ffea92284bc0133c4af09fb WatchSource:0}: Error finding container b3ec7cc4b702b0c73048be046ac73937ab3773511ffea92284bc0133c4af09fb: Status 404 returned error can't find the container with id b3ec7cc4b702b0c73048be046ac73937ab3773511ffea92284bc0133c4af09fb Oct 01 15:19:02 crc kubenswrapper[4869]: I1001 15:19:02.906967 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-769h5"] Oct 01 15:19:02 crc kubenswrapper[4869]: W1001 15:19:02.918004 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6d3a2079_2713_4de2_b3c9_3e3e49d581e0.slice/crio-34ccd1a1668f9066a44024caf4d962ad40bc96f90ac7cf430b4d4925672b654f WatchSource:0}: Error finding container 34ccd1a1668f9066a44024caf4d962ad40bc96f90ac7cf430b4d4925672b654f: Status 404 returned error can't find the container with id 34ccd1a1668f9066a44024caf4d962ad40bc96f90ac7cf430b4d4925672b654f Oct 01 15:19:02 crc kubenswrapper[4869]: I1001 15:19:02.988939 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-769h5" event={"ID":"6d3a2079-2713-4de2-b3c9-3e3e49d581e0","Type":"ContainerStarted","Data":"34ccd1a1668f9066a44024caf4d962ad40bc96f90ac7cf430b4d4925672b654f"} Oct 01 15:19:02 crc kubenswrapper[4869]: I1001 15:19:02.991142 4869 generic.go:334] "Generic (PLEG): container finished" podID="746c5a71-9fcd-42a6-88f6-167edfe66fac" containerID="6a9da1c45c82ec75a07d9cac750447de2ee6d6edb616ac97f47672935c430d42" exitCode=0 Oct 01 15:19:02 crc kubenswrapper[4869]: I1001 15:19:02.991188 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-mx78s" event={"ID":"746c5a71-9fcd-42a6-88f6-167edfe66fac","Type":"ContainerDied","Data":"6a9da1c45c82ec75a07d9cac750447de2ee6d6edb616ac97f47672935c430d42"} Oct 01 15:19:02 crc kubenswrapper[4869]: I1001 15:19:02.991214 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-mx78s" event={"ID":"746c5a71-9fcd-42a6-88f6-167edfe66fac","Type":"ContainerStarted","Data":"b3ec7cc4b702b0c73048be046ac73937ab3773511ffea92284bc0133c4af09fb"} Oct 01 15:19:03 crc kubenswrapper[4869]: I1001 15:19:03.895533 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-86ddb7fb65-5th7v" Oct 01 15:19:03 crc kubenswrapper[4869]: I1001 15:19:03.995954 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-77795d58f5-px8hm"] Oct 01 15:19:03 crc kubenswrapper[4869]: I1001 15:19:03.996191 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-77795d58f5-px8hm" podUID="13f951e9-b94f-41aa-9547-59e5a0eff174" containerName="dnsmasq-dns" containerID="cri-o://7b47e75163355a6bece22860cef0d589bce3ff7b50e7904014e7ddfe621e6a94" gracePeriod=10 Oct 01 15:19:04 crc kubenswrapper[4869]: I1001 15:19:04.008457 4869 generic.go:334] "Generic (PLEG): container finished" podID="6d3a2079-2713-4de2-b3c9-3e3e49d581e0" containerID="3c4caf8ee2cb4970363eb8a019c329bf7d020fc01e7cdd17bb25f60f8c5d9f3a" exitCode=0 Oct 01 15:19:04 crc kubenswrapper[4869]: I1001 15:19:04.008843 4869 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openstack/placement-db-create-769h5" event={"ID":"6d3a2079-2713-4de2-b3c9-3e3e49d581e0","Type":"ContainerDied","Data":"3c4caf8ee2cb4970363eb8a019c329bf7d020fc01e7cdd17bb25f60f8c5d9f3a"} Oct 01 15:19:04 crc kubenswrapper[4869]: I1001 15:19:04.509994 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-mx78s" Oct 01 15:19:04 crc kubenswrapper[4869]: I1001 15:19:04.553965 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-77795d58f5-px8hm" Oct 01 15:19:04 crc kubenswrapper[4869]: I1001 15:19:04.563425 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n7rpm\" (UniqueName: \"kubernetes.io/projected/746c5a71-9fcd-42a6-88f6-167edfe66fac-kube-api-access-n7rpm\") pod \"746c5a71-9fcd-42a6-88f6-167edfe66fac\" (UID: \"746c5a71-9fcd-42a6-88f6-167edfe66fac\") " Oct 01 15:19:04 crc kubenswrapper[4869]: I1001 15:19:04.571915 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/746c5a71-9fcd-42a6-88f6-167edfe66fac-kube-api-access-n7rpm" (OuterVolumeSpecName: "kube-api-access-n7rpm") pod "746c5a71-9fcd-42a6-88f6-167edfe66fac" (UID: "746c5a71-9fcd-42a6-88f6-167edfe66fac"). InnerVolumeSpecName "kube-api-access-n7rpm". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:19:04 crc kubenswrapper[4869]: I1001 15:19:04.665272 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/13f951e9-b94f-41aa-9547-59e5a0eff174-config\") pod \"13f951e9-b94f-41aa-9547-59e5a0eff174\" (UID: \"13f951e9-b94f-41aa-9547-59e5a0eff174\") " Oct 01 15:19:04 crc kubenswrapper[4869]: I1001 15:19:04.665357 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8lv5n\" (UniqueName: \"kubernetes.io/projected/13f951e9-b94f-41aa-9547-59e5a0eff174-kube-api-access-8lv5n\") pod \"13f951e9-b94f-41aa-9547-59e5a0eff174\" (UID: \"13f951e9-b94f-41aa-9547-59e5a0eff174\") " Oct 01 15:19:04 crc kubenswrapper[4869]: I1001 15:19:04.665399 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/13f951e9-b94f-41aa-9547-59e5a0eff174-dns-svc\") pod \"13f951e9-b94f-41aa-9547-59e5a0eff174\" (UID: \"13f951e9-b94f-41aa-9547-59e5a0eff174\") " Oct 01 15:19:04 crc kubenswrapper[4869]: I1001 15:19:04.665730 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n7rpm\" (UniqueName: \"kubernetes.io/projected/746c5a71-9fcd-42a6-88f6-167edfe66fac-kube-api-access-n7rpm\") on node \"crc\" DevicePath \"\"" Oct 01 15:19:04 crc kubenswrapper[4869]: I1001 15:19:04.669494 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/13f951e9-b94f-41aa-9547-59e5a0eff174-kube-api-access-8lv5n" (OuterVolumeSpecName: "kube-api-access-8lv5n") pod "13f951e9-b94f-41aa-9547-59e5a0eff174" (UID: "13f951e9-b94f-41aa-9547-59e5a0eff174"). InnerVolumeSpecName "kube-api-access-8lv5n". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:19:04 crc kubenswrapper[4869]: I1001 15:19:04.705301 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/13f951e9-b94f-41aa-9547-59e5a0eff174-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "13f951e9-b94f-41aa-9547-59e5a0eff174" (UID: "13f951e9-b94f-41aa-9547-59e5a0eff174"). 
InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:19:04 crc kubenswrapper[4869]: I1001 15:19:04.709845 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/13f951e9-b94f-41aa-9547-59e5a0eff174-config" (OuterVolumeSpecName: "config") pod "13f951e9-b94f-41aa-9547-59e5a0eff174" (UID: "13f951e9-b94f-41aa-9547-59e5a0eff174"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:19:04 crc kubenswrapper[4869]: I1001 15:19:04.767040 4869 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/13f951e9-b94f-41aa-9547-59e5a0eff174-config\") on node \"crc\" DevicePath \"\"" Oct 01 15:19:04 crc kubenswrapper[4869]: I1001 15:19:04.767078 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8lv5n\" (UniqueName: \"kubernetes.io/projected/13f951e9-b94f-41aa-9547-59e5a0eff174-kube-api-access-8lv5n\") on node \"crc\" DevicePath \"\"" Oct 01 15:19:04 crc kubenswrapper[4869]: I1001 15:19:04.767096 4869 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/13f951e9-b94f-41aa-9547-59e5a0eff174-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 01 15:19:05 crc kubenswrapper[4869]: I1001 15:19:05.016996 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-mx78s" event={"ID":"746c5a71-9fcd-42a6-88f6-167edfe66fac","Type":"ContainerDied","Data":"b3ec7cc4b702b0c73048be046ac73937ab3773511ffea92284bc0133c4af09fb"} Oct 01 15:19:05 crc kubenswrapper[4869]: I1001 15:19:05.017046 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b3ec7cc4b702b0c73048be046ac73937ab3773511ffea92284bc0133c4af09fb" Oct 01 15:19:05 crc kubenswrapper[4869]: I1001 15:19:05.017066 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-mx78s" Oct 01 15:19:05 crc kubenswrapper[4869]: I1001 15:19:05.019245 4869 generic.go:334] "Generic (PLEG): container finished" podID="13f951e9-b94f-41aa-9547-59e5a0eff174" containerID="7b47e75163355a6bece22860cef0d589bce3ff7b50e7904014e7ddfe621e6a94" exitCode=0 Oct 01 15:19:05 crc kubenswrapper[4869]: I1001 15:19:05.019300 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-77795d58f5-px8hm" Oct 01 15:19:05 crc kubenswrapper[4869]: I1001 15:19:05.019332 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77795d58f5-px8hm" event={"ID":"13f951e9-b94f-41aa-9547-59e5a0eff174","Type":"ContainerDied","Data":"7b47e75163355a6bece22860cef0d589bce3ff7b50e7904014e7ddfe621e6a94"} Oct 01 15:19:05 crc kubenswrapper[4869]: I1001 15:19:05.019362 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77795d58f5-px8hm" event={"ID":"13f951e9-b94f-41aa-9547-59e5a0eff174","Type":"ContainerDied","Data":"fb318105a718901c7fbd06083f97e4c3caf7821003df838e9fe13b3c2829ccae"} Oct 01 15:19:05 crc kubenswrapper[4869]: I1001 15:19:05.019381 4869 scope.go:117] "RemoveContainer" containerID="7b47e75163355a6bece22860cef0d589bce3ff7b50e7904014e7ddfe621e6a94" Oct 01 15:19:05 crc kubenswrapper[4869]: I1001 15:19:05.050436 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-77795d58f5-px8hm"] Oct 01 15:19:05 crc kubenswrapper[4869]: I1001 15:19:05.052484 4869 scope.go:117] "RemoveContainer" containerID="d21f5097758972bcc4d3cd7d2c517908e5b9fccfe8a3fd2e5fa7f5b83820dd32" Oct 01 15:19:05 crc kubenswrapper[4869]: I1001 15:19:05.058771 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-77795d58f5-px8hm"] Oct 01 15:19:05 crc kubenswrapper[4869]: I1001 15:19:05.091614 4869 scope.go:117] "RemoveContainer" containerID="7b47e75163355a6bece22860cef0d589bce3ff7b50e7904014e7ddfe621e6a94" Oct 01 15:19:05 crc kubenswrapper[4869]: E1001 15:19:05.092224 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7b47e75163355a6bece22860cef0d589bce3ff7b50e7904014e7ddfe621e6a94\": container with ID starting with 7b47e75163355a6bece22860cef0d589bce3ff7b50e7904014e7ddfe621e6a94 not found: ID does not exist" containerID="7b47e75163355a6bece22860cef0d589bce3ff7b50e7904014e7ddfe621e6a94" Oct 01 15:19:05 crc kubenswrapper[4869]: I1001 15:19:05.092279 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7b47e75163355a6bece22860cef0d589bce3ff7b50e7904014e7ddfe621e6a94"} err="failed to get container status \"7b47e75163355a6bece22860cef0d589bce3ff7b50e7904014e7ddfe621e6a94\": rpc error: code = NotFound desc = could not find container \"7b47e75163355a6bece22860cef0d589bce3ff7b50e7904014e7ddfe621e6a94\": container with ID starting with 7b47e75163355a6bece22860cef0d589bce3ff7b50e7904014e7ddfe621e6a94 not found: ID does not exist" Oct 01 15:19:05 crc kubenswrapper[4869]: I1001 15:19:05.092306 4869 scope.go:117] "RemoveContainer" containerID="d21f5097758972bcc4d3cd7d2c517908e5b9fccfe8a3fd2e5fa7f5b83820dd32" Oct 01 15:19:05 crc kubenswrapper[4869]: E1001 15:19:05.092899 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d21f5097758972bcc4d3cd7d2c517908e5b9fccfe8a3fd2e5fa7f5b83820dd32\": container with ID starting with d21f5097758972bcc4d3cd7d2c517908e5b9fccfe8a3fd2e5fa7f5b83820dd32 not found: ID does not exist" containerID="d21f5097758972bcc4d3cd7d2c517908e5b9fccfe8a3fd2e5fa7f5b83820dd32" Oct 01 15:19:05 crc kubenswrapper[4869]: I1001 15:19:05.092939 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d21f5097758972bcc4d3cd7d2c517908e5b9fccfe8a3fd2e5fa7f5b83820dd32"} err="failed to get container status 
\"d21f5097758972bcc4d3cd7d2c517908e5b9fccfe8a3fd2e5fa7f5b83820dd32\": rpc error: code = NotFound desc = could not find container \"d21f5097758972bcc4d3cd7d2c517908e5b9fccfe8a3fd2e5fa7f5b83820dd32\": container with ID starting with d21f5097758972bcc4d3cd7d2c517908e5b9fccfe8a3fd2e5fa7f5b83820dd32 not found: ID does not exist" Oct 01 15:19:05 crc kubenswrapper[4869]: I1001 15:19:05.253402 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-769h5" Oct 01 15:19:05 crc kubenswrapper[4869]: I1001 15:19:05.273750 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2g954\" (UniqueName: \"kubernetes.io/projected/6d3a2079-2713-4de2-b3c9-3e3e49d581e0-kube-api-access-2g954\") pod \"6d3a2079-2713-4de2-b3c9-3e3e49d581e0\" (UID: \"6d3a2079-2713-4de2-b3c9-3e3e49d581e0\") " Oct 01 15:19:05 crc kubenswrapper[4869]: I1001 15:19:05.277658 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6d3a2079-2713-4de2-b3c9-3e3e49d581e0-kube-api-access-2g954" (OuterVolumeSpecName: "kube-api-access-2g954") pod "6d3a2079-2713-4de2-b3c9-3e3e49d581e0" (UID: "6d3a2079-2713-4de2-b3c9-3e3e49d581e0"). InnerVolumeSpecName "kube-api-access-2g954". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:19:05 crc kubenswrapper[4869]: I1001 15:19:05.375167 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2g954\" (UniqueName: \"kubernetes.io/projected/6d3a2079-2713-4de2-b3c9-3e3e49d581e0-kube-api-access-2g954\") on node \"crc\" DevicePath \"\"" Oct 01 15:19:05 crc kubenswrapper[4869]: I1001 15:19:05.594801 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="13f951e9-b94f-41aa-9547-59e5a0eff174" path="/var/lib/kubelet/pods/13f951e9-b94f-41aa-9547-59e5a0eff174/volumes" Oct 01 15:19:06 crc kubenswrapper[4869]: I1001 15:19:06.034616 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-769h5" event={"ID":"6d3a2079-2713-4de2-b3c9-3e3e49d581e0","Type":"ContainerDied","Data":"34ccd1a1668f9066a44024caf4d962ad40bc96f90ac7cf430b4d4925672b654f"} Oct 01 15:19:06 crc kubenswrapper[4869]: I1001 15:19:06.034669 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="34ccd1a1668f9066a44024caf4d962ad40bc96f90ac7cf430b4d4925672b654f" Oct 01 15:19:06 crc kubenswrapper[4869]: I1001 15:19:06.034757 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-create-769h5" Oct 01 15:19:07 crc kubenswrapper[4869]: I1001 15:19:07.630310 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-f8bf-account-create-lvqd8"] Oct 01 15:19:07 crc kubenswrapper[4869]: E1001 15:19:07.631192 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6d3a2079-2713-4de2-b3c9-3e3e49d581e0" containerName="mariadb-database-create" Oct 01 15:19:07 crc kubenswrapper[4869]: I1001 15:19:07.631208 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="6d3a2079-2713-4de2-b3c9-3e3e49d581e0" containerName="mariadb-database-create" Oct 01 15:19:07 crc kubenswrapper[4869]: E1001 15:19:07.631225 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="13f951e9-b94f-41aa-9547-59e5a0eff174" containerName="dnsmasq-dns" Oct 01 15:19:07 crc kubenswrapper[4869]: I1001 15:19:07.631233 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="13f951e9-b94f-41aa-9547-59e5a0eff174" containerName="dnsmasq-dns" Oct 01 15:19:07 crc kubenswrapper[4869]: E1001 15:19:07.631247 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="746c5a71-9fcd-42a6-88f6-167edfe66fac" containerName="mariadb-database-create" Oct 01 15:19:07 crc kubenswrapper[4869]: I1001 15:19:07.631275 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="746c5a71-9fcd-42a6-88f6-167edfe66fac" containerName="mariadb-database-create" Oct 01 15:19:07 crc kubenswrapper[4869]: E1001 15:19:07.631303 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="13f951e9-b94f-41aa-9547-59e5a0eff174" containerName="init" Oct 01 15:19:07 crc kubenswrapper[4869]: I1001 15:19:07.631312 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="13f951e9-b94f-41aa-9547-59e5a0eff174" containerName="init" Oct 01 15:19:07 crc kubenswrapper[4869]: I1001 15:19:07.631511 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="6d3a2079-2713-4de2-b3c9-3e3e49d581e0" containerName="mariadb-database-create" Oct 01 15:19:07 crc kubenswrapper[4869]: I1001 15:19:07.631542 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="746c5a71-9fcd-42a6-88f6-167edfe66fac" containerName="mariadb-database-create" Oct 01 15:19:07 crc kubenswrapper[4869]: I1001 15:19:07.631553 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="13f951e9-b94f-41aa-9547-59e5a0eff174" containerName="dnsmasq-dns" Oct 01 15:19:07 crc kubenswrapper[4869]: I1001 15:19:07.632112 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-f8bf-account-create-lvqd8" Oct 01 15:19:07 crc kubenswrapper[4869]: I1001 15:19:07.634556 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret" Oct 01 15:19:07 crc kubenswrapper[4869]: I1001 15:19:07.644657 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-f8bf-account-create-lvqd8"] Oct 01 15:19:07 crc kubenswrapper[4869]: I1001 15:19:07.731686 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-77whd\" (UniqueName: \"kubernetes.io/projected/4287832f-bf35-45ec-a6fa-56e07bfca8f7-kube-api-access-77whd\") pod \"glance-f8bf-account-create-lvqd8\" (UID: \"4287832f-bf35-45ec-a6fa-56e07bfca8f7\") " pod="openstack/glance-f8bf-account-create-lvqd8" Oct 01 15:19:07 crc kubenswrapper[4869]: I1001 15:19:07.832913 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-77whd\" (UniqueName: \"kubernetes.io/projected/4287832f-bf35-45ec-a6fa-56e07bfca8f7-kube-api-access-77whd\") pod \"glance-f8bf-account-create-lvqd8\" (UID: \"4287832f-bf35-45ec-a6fa-56e07bfca8f7\") " pod="openstack/glance-f8bf-account-create-lvqd8" Oct 01 15:19:07 crc kubenswrapper[4869]: I1001 15:19:07.857367 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-77whd\" (UniqueName: \"kubernetes.io/projected/4287832f-bf35-45ec-a6fa-56e07bfca8f7-kube-api-access-77whd\") pod \"glance-f8bf-account-create-lvqd8\" (UID: \"4287832f-bf35-45ec-a6fa-56e07bfca8f7\") " pod="openstack/glance-f8bf-account-create-lvqd8" Oct 01 15:19:07 crc kubenswrapper[4869]: I1001 15:19:07.958572 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-f8bf-account-create-lvqd8" Oct 01 15:19:08 crc kubenswrapper[4869]: I1001 15:19:08.068844 4869 generic.go:334] "Generic (PLEG): container finished" podID="25c1e81e-fa0e-4ec6-b29c-bda2529fde66" containerID="676eea2624cce6e1eb68bf2c8575b8ec869fb8ee8184f7395a0ca6650eae7267" exitCode=0 Oct 01 15:19:08 crc kubenswrapper[4869]: I1001 15:19:08.068960 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"25c1e81e-fa0e-4ec6-b29c-bda2529fde66","Type":"ContainerDied","Data":"676eea2624cce6e1eb68bf2c8575b8ec869fb8ee8184f7395a0ca6650eae7267"} Oct 01 15:19:08 crc kubenswrapper[4869]: I1001 15:19:08.080537 4869 generic.go:334] "Generic (PLEG): container finished" podID="004ab312-4718-4cf2-80df-5a2b1eccc301" containerID="d20e4a80d05da9996c50b83728ad8a401a62da2d044eb3737994a92551baa863" exitCode=0 Oct 01 15:19:08 crc kubenswrapper[4869]: I1001 15:19:08.080631 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"004ab312-4718-4cf2-80df-5a2b1eccc301","Type":"ContainerDied","Data":"d20e4a80d05da9996c50b83728ad8a401a62da2d044eb3737994a92551baa863"} Oct 01 15:19:08 crc kubenswrapper[4869]: W1001 15:19:08.418953 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4287832f_bf35_45ec_a6fa_56e07bfca8f7.slice/crio-22907f4e9bb492164f3a3fafb77dd7b2b268da4a59f5b3b4cfb899cd8e2a40e4 WatchSource:0}: Error finding container 22907f4e9bb492164f3a3fafb77dd7b2b268da4a59f5b3b4cfb899cd8e2a40e4: Status 404 returned error can't find the container with id 22907f4e9bb492164f3a3fafb77dd7b2b268da4a59f5b3b4cfb899cd8e2a40e4 Oct 01 15:19:08 crc kubenswrapper[4869]: I1001 
15:19:08.419159 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-f8bf-account-create-lvqd8"] Oct 01 15:19:09 crc kubenswrapper[4869]: I1001 15:19:09.003173 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Oct 01 15:19:09 crc kubenswrapper[4869]: I1001 15:19:09.089150 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"004ab312-4718-4cf2-80df-5a2b1eccc301","Type":"ContainerStarted","Data":"8bb7c698ac78361fbdd84081d11aa849720c8828f54f87f1420ea74cb36a5faf"} Oct 01 15:19:09 crc kubenswrapper[4869]: I1001 15:19:09.090313 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Oct 01 15:19:09 crc kubenswrapper[4869]: I1001 15:19:09.092084 4869 generic.go:334] "Generic (PLEG): container finished" podID="4287832f-bf35-45ec-a6fa-56e07bfca8f7" containerID="94257e09992c96c437ff35162ebf79331cede5e490be5d690b8c269edc20fb68" exitCode=0 Oct 01 15:19:09 crc kubenswrapper[4869]: I1001 15:19:09.092172 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-f8bf-account-create-lvqd8" event={"ID":"4287832f-bf35-45ec-a6fa-56e07bfca8f7","Type":"ContainerDied","Data":"94257e09992c96c437ff35162ebf79331cede5e490be5d690b8c269edc20fb68"} Oct 01 15:19:09 crc kubenswrapper[4869]: I1001 15:19:09.092208 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-f8bf-account-create-lvqd8" event={"ID":"4287832f-bf35-45ec-a6fa-56e07bfca8f7","Type":"ContainerStarted","Data":"22907f4e9bb492164f3a3fafb77dd7b2b268da4a59f5b3b4cfb899cd8e2a40e4"} Oct 01 15:19:09 crc kubenswrapper[4869]: I1001 15:19:09.094717 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"25c1e81e-fa0e-4ec6-b29c-bda2529fde66","Type":"ContainerStarted","Data":"2cdb4a3848e9ce2788889d11169e2b89ffe9fd6474ba6f492157f5a0774267f1"} Oct 01 15:19:09 crc kubenswrapper[4869]: I1001 15:19:09.095570 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Oct 01 15:19:09 crc kubenswrapper[4869]: I1001 15:19:09.117081 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=38.328745901 podStartE2EDuration="52.117065691s" podCreationTimestamp="2025-10-01 15:18:17 +0000 UTC" firstStartedPulling="2025-10-01 15:18:20.023181162 +0000 UTC m=+809.170024278" lastFinishedPulling="2025-10-01 15:18:33.811500952 +0000 UTC m=+822.958344068" observedRunningTime="2025-10-01 15:19:09.114751223 +0000 UTC m=+858.261594329" watchObservedRunningTime="2025-10-01 15:19:09.117065691 +0000 UTC m=+858.263908807" Oct 01 15:19:09 crc kubenswrapper[4869]: I1001 15:19:09.134569 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=38.243551539 podStartE2EDuration="52.134552733s" podCreationTimestamp="2025-10-01 15:18:17 +0000 UTC" firstStartedPulling="2025-10-01 15:18:19.987136061 +0000 UTC m=+809.133979177" lastFinishedPulling="2025-10-01 15:18:33.878137265 +0000 UTC m=+823.024980371" observedRunningTime="2025-10-01 15:19:09.132674905 +0000 UTC m=+858.279518041" watchObservedRunningTime="2025-10-01 15:19:09.134552733 +0000 UTC m=+858.281395849" Oct 01 15:19:10 crc kubenswrapper[4869]: I1001 15:19:10.459802 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-f8bf-account-create-lvqd8" Oct 01 15:19:10 crc kubenswrapper[4869]: I1001 15:19:10.590516 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-77whd\" (UniqueName: \"kubernetes.io/projected/4287832f-bf35-45ec-a6fa-56e07bfca8f7-kube-api-access-77whd\") pod \"4287832f-bf35-45ec-a6fa-56e07bfca8f7\" (UID: \"4287832f-bf35-45ec-a6fa-56e07bfca8f7\") " Oct 01 15:19:10 crc kubenswrapper[4869]: I1001 15:19:10.596379 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4287832f-bf35-45ec-a6fa-56e07bfca8f7-kube-api-access-77whd" (OuterVolumeSpecName: "kube-api-access-77whd") pod "4287832f-bf35-45ec-a6fa-56e07bfca8f7" (UID: "4287832f-bf35-45ec-a6fa-56e07bfca8f7"). InnerVolumeSpecName "kube-api-access-77whd". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:19:10 crc kubenswrapper[4869]: I1001 15:19:10.692031 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-77whd\" (UniqueName: \"kubernetes.io/projected/4287832f-bf35-45ec-a6fa-56e07bfca8f7-kube-api-access-77whd\") on node \"crc\" DevicePath \"\"" Oct 01 15:19:11 crc kubenswrapper[4869]: I1001 15:19:11.117879 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-f8bf-account-create-lvqd8" event={"ID":"4287832f-bf35-45ec-a6fa-56e07bfca8f7","Type":"ContainerDied","Data":"22907f4e9bb492164f3a3fafb77dd7b2b268da4a59f5b3b4cfb899cd8e2a40e4"} Oct 01 15:19:11 crc kubenswrapper[4869]: I1001 15:19:11.118120 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="22907f4e9bb492164f3a3fafb77dd7b2b268da4a59f5b3b4cfb899cd8e2a40e4" Oct 01 15:19:11 crc kubenswrapper[4869]: I1001 15:19:11.117953 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-f8bf-account-create-lvqd8" Oct 01 15:19:11 crc kubenswrapper[4869]: I1001 15:19:11.987704 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-9562-account-create-rx6kp"] Oct 01 15:19:11 crc kubenswrapper[4869]: E1001 15:19:11.988369 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4287832f-bf35-45ec-a6fa-56e07bfca8f7" containerName="mariadb-account-create" Oct 01 15:19:11 crc kubenswrapper[4869]: I1001 15:19:11.988399 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="4287832f-bf35-45ec-a6fa-56e07bfca8f7" containerName="mariadb-account-create" Oct 01 15:19:11 crc kubenswrapper[4869]: I1001 15:19:11.988700 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="4287832f-bf35-45ec-a6fa-56e07bfca8f7" containerName="mariadb-account-create" Oct 01 15:19:11 crc kubenswrapper[4869]: I1001 15:19:11.989806 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-9562-account-create-rx6kp" Oct 01 15:19:11 crc kubenswrapper[4869]: I1001 15:19:11.993752 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Oct 01 15:19:11 crc kubenswrapper[4869]: I1001 15:19:11.993993 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-9562-account-create-rx6kp"] Oct 01 15:19:12 crc kubenswrapper[4869]: I1001 15:19:12.115998 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k6spc\" (UniqueName: \"kubernetes.io/projected/a2d8b42f-6c11-4f05-baa2-5252184c1e92-kube-api-access-k6spc\") pod \"keystone-9562-account-create-rx6kp\" (UID: \"a2d8b42f-6c11-4f05-baa2-5252184c1e92\") " pod="openstack/keystone-9562-account-create-rx6kp" Oct 01 15:19:12 crc kubenswrapper[4869]: I1001 15:19:12.217542 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k6spc\" (UniqueName: \"kubernetes.io/projected/a2d8b42f-6c11-4f05-baa2-5252184c1e92-kube-api-access-k6spc\") pod \"keystone-9562-account-create-rx6kp\" (UID: \"a2d8b42f-6c11-4f05-baa2-5252184c1e92\") " pod="openstack/keystone-9562-account-create-rx6kp" Oct 01 15:19:12 crc kubenswrapper[4869]: I1001 15:19:12.237045 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k6spc\" (UniqueName: \"kubernetes.io/projected/a2d8b42f-6c11-4f05-baa2-5252184c1e92-kube-api-access-k6spc\") pod \"keystone-9562-account-create-rx6kp\" (UID: \"a2d8b42f-6c11-4f05-baa2-5252184c1e92\") " pod="openstack/keystone-9562-account-create-rx6kp" Oct 01 15:19:12 crc kubenswrapper[4869]: I1001 15:19:12.309188 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-9562-account-create-rx6kp" Oct 01 15:19:12 crc kubenswrapper[4869]: I1001 15:19:12.358935 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-b636-account-create-sbzps"] Oct 01 15:19:12 crc kubenswrapper[4869]: I1001 15:19:12.360377 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-b636-account-create-sbzps" Oct 01 15:19:12 crc kubenswrapper[4869]: I1001 15:19:12.363666 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret" Oct 01 15:19:12 crc kubenswrapper[4869]: I1001 15:19:12.366837 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-b636-account-create-sbzps"] Oct 01 15:19:12 crc kubenswrapper[4869]: I1001 15:19:12.522010 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kszqs\" (UniqueName: \"kubernetes.io/projected/f7670535-ca76-4d3e-8493-ca7341d7de3f-kube-api-access-kszqs\") pod \"placement-b636-account-create-sbzps\" (UID: \"f7670535-ca76-4d3e-8493-ca7341d7de3f\") " pod="openstack/placement-b636-account-create-sbzps" Oct 01 15:19:12 crc kubenswrapper[4869]: I1001 15:19:12.623854 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kszqs\" (UniqueName: \"kubernetes.io/projected/f7670535-ca76-4d3e-8493-ca7341d7de3f-kube-api-access-kszqs\") pod \"placement-b636-account-create-sbzps\" (UID: \"f7670535-ca76-4d3e-8493-ca7341d7de3f\") " pod="openstack/placement-b636-account-create-sbzps" Oct 01 15:19:12 crc kubenswrapper[4869]: I1001 15:19:12.640680 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kszqs\" (UniqueName: \"kubernetes.io/projected/f7670535-ca76-4d3e-8493-ca7341d7de3f-kube-api-access-kszqs\") pod \"placement-b636-account-create-sbzps\" (UID: \"f7670535-ca76-4d3e-8493-ca7341d7de3f\") " pod="openstack/placement-b636-account-create-sbzps" Oct 01 15:19:12 crc kubenswrapper[4869]: I1001 15:19:12.750670 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-b636-account-create-sbzps" Oct 01 15:19:12 crc kubenswrapper[4869]: I1001 15:19:12.765293 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-9562-account-create-rx6kp"] Oct 01 15:19:12 crc kubenswrapper[4869]: I1001 15:19:12.807768 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-d6v2s"] Oct 01 15:19:12 crc kubenswrapper[4869]: I1001 15:19:12.808721 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-d6v2s" Oct 01 15:19:12 crc kubenswrapper[4869]: I1001 15:19:12.810750 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data" Oct 01 15:19:12 crc kubenswrapper[4869]: I1001 15:19:12.812849 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-df8gc" Oct 01 15:19:12 crc kubenswrapper[4869]: I1001 15:19:12.822530 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-d6v2s"] Oct 01 15:19:12 crc kubenswrapper[4869]: I1001 15:19:12.845716 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/16eefc1b-2890-4b09-8f2b-0febecad6523-config-data\") pod \"glance-db-sync-d6v2s\" (UID: \"16eefc1b-2890-4b09-8f2b-0febecad6523\") " pod="openstack/glance-db-sync-d6v2s" Oct 01 15:19:12 crc kubenswrapper[4869]: I1001 15:19:12.845778 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5qfhq\" (UniqueName: \"kubernetes.io/projected/16eefc1b-2890-4b09-8f2b-0febecad6523-kube-api-access-5qfhq\") pod \"glance-db-sync-d6v2s\" (UID: \"16eefc1b-2890-4b09-8f2b-0febecad6523\") " pod="openstack/glance-db-sync-d6v2s" Oct 01 15:19:12 crc kubenswrapper[4869]: I1001 15:19:12.845889 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/16eefc1b-2890-4b09-8f2b-0febecad6523-db-sync-config-data\") pod \"glance-db-sync-d6v2s\" (UID: \"16eefc1b-2890-4b09-8f2b-0febecad6523\") " pod="openstack/glance-db-sync-d6v2s" Oct 01 15:19:12 crc kubenswrapper[4869]: I1001 15:19:12.845909 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16eefc1b-2890-4b09-8f2b-0febecad6523-combined-ca-bundle\") pod \"glance-db-sync-d6v2s\" (UID: \"16eefc1b-2890-4b09-8f2b-0febecad6523\") " pod="openstack/glance-db-sync-d6v2s" Oct 01 15:19:12 crc kubenswrapper[4869]: I1001 15:19:12.946837 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/16eefc1b-2890-4b09-8f2b-0febecad6523-db-sync-config-data\") pod \"glance-db-sync-d6v2s\" (UID: \"16eefc1b-2890-4b09-8f2b-0febecad6523\") " pod="openstack/glance-db-sync-d6v2s" Oct 01 15:19:12 crc kubenswrapper[4869]: I1001 15:19:12.947297 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16eefc1b-2890-4b09-8f2b-0febecad6523-combined-ca-bundle\") pod \"glance-db-sync-d6v2s\" (UID: \"16eefc1b-2890-4b09-8f2b-0febecad6523\") " pod="openstack/glance-db-sync-d6v2s" Oct 01 15:19:12 crc kubenswrapper[4869]: I1001 15:19:12.947647 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5qfhq\" (UniqueName: \"kubernetes.io/projected/16eefc1b-2890-4b09-8f2b-0febecad6523-kube-api-access-5qfhq\") pod \"glance-db-sync-d6v2s\" (UID: \"16eefc1b-2890-4b09-8f2b-0febecad6523\") " pod="openstack/glance-db-sync-d6v2s" Oct 01 15:19:12 crc kubenswrapper[4869]: I1001 15:19:12.947674 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/16eefc1b-2890-4b09-8f2b-0febecad6523-config-data\") pod 
\"glance-db-sync-d6v2s\" (UID: \"16eefc1b-2890-4b09-8f2b-0febecad6523\") " pod="openstack/glance-db-sync-d6v2s" Oct 01 15:19:12 crc kubenswrapper[4869]: I1001 15:19:12.952196 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16eefc1b-2890-4b09-8f2b-0febecad6523-combined-ca-bundle\") pod \"glance-db-sync-d6v2s\" (UID: \"16eefc1b-2890-4b09-8f2b-0febecad6523\") " pod="openstack/glance-db-sync-d6v2s" Oct 01 15:19:12 crc kubenswrapper[4869]: I1001 15:19:12.954641 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/16eefc1b-2890-4b09-8f2b-0febecad6523-db-sync-config-data\") pod \"glance-db-sync-d6v2s\" (UID: \"16eefc1b-2890-4b09-8f2b-0febecad6523\") " pod="openstack/glance-db-sync-d6v2s" Oct 01 15:19:12 crc kubenswrapper[4869]: I1001 15:19:12.954719 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/16eefc1b-2890-4b09-8f2b-0febecad6523-config-data\") pod \"glance-db-sync-d6v2s\" (UID: \"16eefc1b-2890-4b09-8f2b-0febecad6523\") " pod="openstack/glance-db-sync-d6v2s" Oct 01 15:19:12 crc kubenswrapper[4869]: I1001 15:19:12.965580 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5qfhq\" (UniqueName: \"kubernetes.io/projected/16eefc1b-2890-4b09-8f2b-0febecad6523-kube-api-access-5qfhq\") pod \"glance-db-sync-d6v2s\" (UID: \"16eefc1b-2890-4b09-8f2b-0febecad6523\") " pod="openstack/glance-db-sync-d6v2s" Oct 01 15:19:13 crc kubenswrapper[4869]: I1001 15:19:13.135829 4869 generic.go:334] "Generic (PLEG): container finished" podID="a2d8b42f-6c11-4f05-baa2-5252184c1e92" containerID="7d1d70ef4225de9cd48199f03b003bed499ac89b280193aef1af9b58e2b67964" exitCode=0 Oct 01 15:19:13 crc kubenswrapper[4869]: I1001 15:19:13.135881 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-9562-account-create-rx6kp" event={"ID":"a2d8b42f-6c11-4f05-baa2-5252184c1e92","Type":"ContainerDied","Data":"7d1d70ef4225de9cd48199f03b003bed499ac89b280193aef1af9b58e2b67964"} Oct 01 15:19:13 crc kubenswrapper[4869]: I1001 15:19:13.135912 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-9562-account-create-rx6kp" event={"ID":"a2d8b42f-6c11-4f05-baa2-5252184c1e92","Type":"ContainerStarted","Data":"b397ed5c62b346e0030bfbb54934894f56d681837e9ddd717732cc40461376fc"} Oct 01 15:19:13 crc kubenswrapper[4869]: I1001 15:19:13.150900 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-d6v2s" Oct 01 15:19:13 crc kubenswrapper[4869]: I1001 15:19:13.225613 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-b636-account-create-sbzps"] Oct 01 15:19:13 crc kubenswrapper[4869]: I1001 15:19:13.329852 4869 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-8xxqn" podUID="fce7eb65-0111-43b3-9265-700c584695fa" containerName="ovn-controller" probeResult="failure" output=< Oct 01 15:19:13 crc kubenswrapper[4869]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Oct 01 15:19:13 crc kubenswrapper[4869]: > Oct 01 15:19:13 crc kubenswrapper[4869]: I1001 15:19:13.347704 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-jhxfd" Oct 01 15:19:13 crc kubenswrapper[4869]: I1001 15:19:13.347764 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-jhxfd" Oct 01 15:19:13 crc kubenswrapper[4869]: I1001 15:19:13.354323 4869 patch_prober.go:28] interesting pod/machine-config-daemon-c86m8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 15:19:13 crc kubenswrapper[4869]: I1001 15:19:13.354369 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 15:19:13 crc kubenswrapper[4869]: I1001 15:19:13.542979 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-8xxqn-config-v4f9s"] Oct 01 15:19:13 crc kubenswrapper[4869]: I1001 15:19:13.543996 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-8xxqn-config-v4f9s" Oct 01 15:19:13 crc kubenswrapper[4869]: I1001 15:19:13.546399 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Oct 01 15:19:13 crc kubenswrapper[4869]: I1001 15:19:13.553544 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-8xxqn-config-v4f9s"] Oct 01 15:19:13 crc kubenswrapper[4869]: I1001 15:19:13.657585 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/9bff5dbb-dd36-4646-aeaf-9ece110273b3-var-log-ovn\") pod \"ovn-controller-8xxqn-config-v4f9s\" (UID: \"9bff5dbb-dd36-4646-aeaf-9ece110273b3\") " pod="openstack/ovn-controller-8xxqn-config-v4f9s" Oct 01 15:19:13 crc kubenswrapper[4869]: I1001 15:19:13.658112 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-594d5\" (UniqueName: \"kubernetes.io/projected/9bff5dbb-dd36-4646-aeaf-9ece110273b3-kube-api-access-594d5\") pod \"ovn-controller-8xxqn-config-v4f9s\" (UID: \"9bff5dbb-dd36-4646-aeaf-9ece110273b3\") " pod="openstack/ovn-controller-8xxqn-config-v4f9s" Oct 01 15:19:13 crc kubenswrapper[4869]: I1001 15:19:13.658157 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/9bff5dbb-dd36-4646-aeaf-9ece110273b3-var-run\") pod \"ovn-controller-8xxqn-config-v4f9s\" (UID: \"9bff5dbb-dd36-4646-aeaf-9ece110273b3\") " pod="openstack/ovn-controller-8xxqn-config-v4f9s" Oct 01 15:19:13 crc kubenswrapper[4869]: I1001 15:19:13.658176 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9bff5dbb-dd36-4646-aeaf-9ece110273b3-scripts\") pod \"ovn-controller-8xxqn-config-v4f9s\" (UID: \"9bff5dbb-dd36-4646-aeaf-9ece110273b3\") " pod="openstack/ovn-controller-8xxqn-config-v4f9s" Oct 01 15:19:13 crc kubenswrapper[4869]: I1001 15:19:13.658225 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/9bff5dbb-dd36-4646-aeaf-9ece110273b3-var-run-ovn\") pod \"ovn-controller-8xxqn-config-v4f9s\" (UID: \"9bff5dbb-dd36-4646-aeaf-9ece110273b3\") " pod="openstack/ovn-controller-8xxqn-config-v4f9s" Oct 01 15:19:13 crc kubenswrapper[4869]: I1001 15:19:13.658281 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/9bff5dbb-dd36-4646-aeaf-9ece110273b3-additional-scripts\") pod \"ovn-controller-8xxqn-config-v4f9s\" (UID: \"9bff5dbb-dd36-4646-aeaf-9ece110273b3\") " pod="openstack/ovn-controller-8xxqn-config-v4f9s" Oct 01 15:19:13 crc kubenswrapper[4869]: I1001 15:19:13.756365 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-d6v2s"] Oct 01 15:19:13 crc kubenswrapper[4869]: I1001 15:19:13.760747 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/9bff5dbb-dd36-4646-aeaf-9ece110273b3-var-log-ovn\") pod \"ovn-controller-8xxqn-config-v4f9s\" (UID: \"9bff5dbb-dd36-4646-aeaf-9ece110273b3\") " pod="openstack/ovn-controller-8xxqn-config-v4f9s" Oct 01 15:19:13 crc kubenswrapper[4869]: I1001 15:19:13.760953 4869 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-594d5\" (UniqueName: \"kubernetes.io/projected/9bff5dbb-dd36-4646-aeaf-9ece110273b3-kube-api-access-594d5\") pod \"ovn-controller-8xxqn-config-v4f9s\" (UID: \"9bff5dbb-dd36-4646-aeaf-9ece110273b3\") " pod="openstack/ovn-controller-8xxqn-config-v4f9s" Oct 01 15:19:13 crc kubenswrapper[4869]: I1001 15:19:13.761015 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/9bff5dbb-dd36-4646-aeaf-9ece110273b3-var-run\") pod \"ovn-controller-8xxqn-config-v4f9s\" (UID: \"9bff5dbb-dd36-4646-aeaf-9ece110273b3\") " pod="openstack/ovn-controller-8xxqn-config-v4f9s" Oct 01 15:19:13 crc kubenswrapper[4869]: I1001 15:19:13.761038 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9bff5dbb-dd36-4646-aeaf-9ece110273b3-scripts\") pod \"ovn-controller-8xxqn-config-v4f9s\" (UID: \"9bff5dbb-dd36-4646-aeaf-9ece110273b3\") " pod="openstack/ovn-controller-8xxqn-config-v4f9s" Oct 01 15:19:13 crc kubenswrapper[4869]: I1001 15:19:13.761146 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/9bff5dbb-dd36-4646-aeaf-9ece110273b3-var-run-ovn\") pod \"ovn-controller-8xxqn-config-v4f9s\" (UID: \"9bff5dbb-dd36-4646-aeaf-9ece110273b3\") " pod="openstack/ovn-controller-8xxqn-config-v4f9s" Oct 01 15:19:13 crc kubenswrapper[4869]: I1001 15:19:13.761178 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/9bff5dbb-dd36-4646-aeaf-9ece110273b3-additional-scripts\") pod \"ovn-controller-8xxqn-config-v4f9s\" (UID: \"9bff5dbb-dd36-4646-aeaf-9ece110273b3\") " pod="openstack/ovn-controller-8xxqn-config-v4f9s" Oct 01 15:19:13 crc kubenswrapper[4869]: I1001 15:19:13.762491 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/9bff5dbb-dd36-4646-aeaf-9ece110273b3-var-log-ovn\") pod \"ovn-controller-8xxqn-config-v4f9s\" (UID: \"9bff5dbb-dd36-4646-aeaf-9ece110273b3\") " pod="openstack/ovn-controller-8xxqn-config-v4f9s" Oct 01 15:19:13 crc kubenswrapper[4869]: I1001 15:19:13.764463 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/9bff5dbb-dd36-4646-aeaf-9ece110273b3-var-run\") pod \"ovn-controller-8xxqn-config-v4f9s\" (UID: \"9bff5dbb-dd36-4646-aeaf-9ece110273b3\") " pod="openstack/ovn-controller-8xxqn-config-v4f9s" Oct 01 15:19:13 crc kubenswrapper[4869]: I1001 15:19:13.766414 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9bff5dbb-dd36-4646-aeaf-9ece110273b3-scripts\") pod \"ovn-controller-8xxqn-config-v4f9s\" (UID: \"9bff5dbb-dd36-4646-aeaf-9ece110273b3\") " pod="openstack/ovn-controller-8xxqn-config-v4f9s" Oct 01 15:19:13 crc kubenswrapper[4869]: I1001 15:19:13.766565 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/9bff5dbb-dd36-4646-aeaf-9ece110273b3-additional-scripts\") pod \"ovn-controller-8xxqn-config-v4f9s\" (UID: \"9bff5dbb-dd36-4646-aeaf-9ece110273b3\") " pod="openstack/ovn-controller-8xxqn-config-v4f9s" Oct 01 15:19:13 crc kubenswrapper[4869]: I1001 15:19:13.767528 4869 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/9bff5dbb-dd36-4646-aeaf-9ece110273b3-var-run-ovn\") pod \"ovn-controller-8xxqn-config-v4f9s\" (UID: \"9bff5dbb-dd36-4646-aeaf-9ece110273b3\") " pod="openstack/ovn-controller-8xxqn-config-v4f9s" Oct 01 15:19:13 crc kubenswrapper[4869]: I1001 15:19:13.792786 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-594d5\" (UniqueName: \"kubernetes.io/projected/9bff5dbb-dd36-4646-aeaf-9ece110273b3-kube-api-access-594d5\") pod \"ovn-controller-8xxqn-config-v4f9s\" (UID: \"9bff5dbb-dd36-4646-aeaf-9ece110273b3\") " pod="openstack/ovn-controller-8xxqn-config-v4f9s" Oct 01 15:19:13 crc kubenswrapper[4869]: I1001 15:19:13.863947 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-8xxqn-config-v4f9s" Oct 01 15:19:14 crc kubenswrapper[4869]: I1001 15:19:14.144069 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-d6v2s" event={"ID":"16eefc1b-2890-4b09-8f2b-0febecad6523","Type":"ContainerStarted","Data":"61be75e1f41e8c1f9d35095f918b133865a147be815917224cfdb06d66e0cf52"} Oct 01 15:19:14 crc kubenswrapper[4869]: I1001 15:19:14.149341 4869 generic.go:334] "Generic (PLEG): container finished" podID="f7670535-ca76-4d3e-8493-ca7341d7de3f" containerID="e7193361abf25bba8ae9e50ff67a61ea04304dfd7d20fb79ab37092b30828f4e" exitCode=0 Oct 01 15:19:14 crc kubenswrapper[4869]: I1001 15:19:14.149454 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-b636-account-create-sbzps" event={"ID":"f7670535-ca76-4d3e-8493-ca7341d7de3f","Type":"ContainerDied","Data":"e7193361abf25bba8ae9e50ff67a61ea04304dfd7d20fb79ab37092b30828f4e"} Oct 01 15:19:14 crc kubenswrapper[4869]: I1001 15:19:14.149503 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-b636-account-create-sbzps" event={"ID":"f7670535-ca76-4d3e-8493-ca7341d7de3f","Type":"ContainerStarted","Data":"0637429a1331a2f056576d43ca0202f90bb431fe121b2119a33c107d67eeed36"} Oct 01 15:19:14 crc kubenswrapper[4869]: I1001 15:19:14.318782 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-8xxqn-config-v4f9s"] Oct 01 15:19:14 crc kubenswrapper[4869]: I1001 15:19:14.530045 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-9562-account-create-rx6kp" Oct 01 15:19:14 crc kubenswrapper[4869]: I1001 15:19:14.706774 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k6spc\" (UniqueName: \"kubernetes.io/projected/a2d8b42f-6c11-4f05-baa2-5252184c1e92-kube-api-access-k6spc\") pod \"a2d8b42f-6c11-4f05-baa2-5252184c1e92\" (UID: \"a2d8b42f-6c11-4f05-baa2-5252184c1e92\") " Oct 01 15:19:14 crc kubenswrapper[4869]: I1001 15:19:14.715476 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a2d8b42f-6c11-4f05-baa2-5252184c1e92-kube-api-access-k6spc" (OuterVolumeSpecName: "kube-api-access-k6spc") pod "a2d8b42f-6c11-4f05-baa2-5252184c1e92" (UID: "a2d8b42f-6c11-4f05-baa2-5252184c1e92"). InnerVolumeSpecName "kube-api-access-k6spc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:19:14 crc kubenswrapper[4869]: I1001 15:19:14.809105 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k6spc\" (UniqueName: \"kubernetes.io/projected/a2d8b42f-6c11-4f05-baa2-5252184c1e92-kube-api-access-k6spc\") on node \"crc\" DevicePath \"\"" Oct 01 15:19:15 crc kubenswrapper[4869]: E1001 15:19:15.011944 4869 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9bff5dbb_dd36_4646_aeaf_9ece110273b3.slice/crio-conmon-66bcce14f8021da5c860327093bf08a347d2f379c0a2209d540b5e338aeb57d9.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9bff5dbb_dd36_4646_aeaf_9ece110273b3.slice/crio-66bcce14f8021da5c860327093bf08a347d2f379c0a2209d540b5e338aeb57d9.scope\": RecentStats: unable to find data in memory cache]" Oct 01 15:19:15 crc kubenswrapper[4869]: I1001 15:19:15.159390 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-9562-account-create-rx6kp" event={"ID":"a2d8b42f-6c11-4f05-baa2-5252184c1e92","Type":"ContainerDied","Data":"b397ed5c62b346e0030bfbb54934894f56d681837e9ddd717732cc40461376fc"} Oct 01 15:19:15 crc kubenswrapper[4869]: I1001 15:19:15.159467 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b397ed5c62b346e0030bfbb54934894f56d681837e9ddd717732cc40461376fc" Oct 01 15:19:15 crc kubenswrapper[4869]: I1001 15:19:15.159411 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-9562-account-create-rx6kp" Oct 01 15:19:15 crc kubenswrapper[4869]: I1001 15:19:15.161051 4869 generic.go:334] "Generic (PLEG): container finished" podID="9bff5dbb-dd36-4646-aeaf-9ece110273b3" containerID="66bcce14f8021da5c860327093bf08a347d2f379c0a2209d540b5e338aeb57d9" exitCode=0 Oct 01 15:19:15 crc kubenswrapper[4869]: I1001 15:19:15.161107 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-8xxqn-config-v4f9s" event={"ID":"9bff5dbb-dd36-4646-aeaf-9ece110273b3","Type":"ContainerDied","Data":"66bcce14f8021da5c860327093bf08a347d2f379c0a2209d540b5e338aeb57d9"} Oct 01 15:19:15 crc kubenswrapper[4869]: I1001 15:19:15.161160 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-8xxqn-config-v4f9s" event={"ID":"9bff5dbb-dd36-4646-aeaf-9ece110273b3","Type":"ContainerStarted","Data":"6cf57c31838deff37a66cff77457d13eede4b8694e942f21b50b97eb04f37bb9"} Oct 01 15:19:15 crc kubenswrapper[4869]: I1001 15:19:15.449880 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-b636-account-create-sbzps" Oct 01 15:19:15 crc kubenswrapper[4869]: I1001 15:19:15.624323 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kszqs\" (UniqueName: \"kubernetes.io/projected/f7670535-ca76-4d3e-8493-ca7341d7de3f-kube-api-access-kszqs\") pod \"f7670535-ca76-4d3e-8493-ca7341d7de3f\" (UID: \"f7670535-ca76-4d3e-8493-ca7341d7de3f\") " Oct 01 15:19:15 crc kubenswrapper[4869]: I1001 15:19:15.631535 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f7670535-ca76-4d3e-8493-ca7341d7de3f-kube-api-access-kszqs" (OuterVolumeSpecName: "kube-api-access-kszqs") pod "f7670535-ca76-4d3e-8493-ca7341d7de3f" (UID: "f7670535-ca76-4d3e-8493-ca7341d7de3f"). 
InnerVolumeSpecName "kube-api-access-kszqs". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:19:15 crc kubenswrapper[4869]: I1001 15:19:15.727894 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kszqs\" (UniqueName: \"kubernetes.io/projected/f7670535-ca76-4d3e-8493-ca7341d7de3f-kube-api-access-kszqs\") on node \"crc\" DevicePath \"\"" Oct 01 15:19:16 crc kubenswrapper[4869]: I1001 15:19:16.176293 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-b636-account-create-sbzps" event={"ID":"f7670535-ca76-4d3e-8493-ca7341d7de3f","Type":"ContainerDied","Data":"0637429a1331a2f056576d43ca0202f90bb431fe121b2119a33c107d67eeed36"} Oct 01 15:19:16 crc kubenswrapper[4869]: I1001 15:19:16.176359 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0637429a1331a2f056576d43ca0202f90bb431fe121b2119a33c107d67eeed36" Oct 01 15:19:16 crc kubenswrapper[4869]: I1001 15:19:16.176328 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-b636-account-create-sbzps" Oct 01 15:19:16 crc kubenswrapper[4869]: I1001 15:19:16.492653 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-8xxqn-config-v4f9s" Oct 01 15:19:16 crc kubenswrapper[4869]: I1001 15:19:16.640826 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9bff5dbb-dd36-4646-aeaf-9ece110273b3-scripts\") pod \"9bff5dbb-dd36-4646-aeaf-9ece110273b3\" (UID: \"9bff5dbb-dd36-4646-aeaf-9ece110273b3\") " Oct 01 15:19:16 crc kubenswrapper[4869]: I1001 15:19:16.640915 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/9bff5dbb-dd36-4646-aeaf-9ece110273b3-additional-scripts\") pod \"9bff5dbb-dd36-4646-aeaf-9ece110273b3\" (UID: \"9bff5dbb-dd36-4646-aeaf-9ece110273b3\") " Oct 01 15:19:16 crc kubenswrapper[4869]: I1001 15:19:16.640938 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/9bff5dbb-dd36-4646-aeaf-9ece110273b3-var-log-ovn\") pod \"9bff5dbb-dd36-4646-aeaf-9ece110273b3\" (UID: \"9bff5dbb-dd36-4646-aeaf-9ece110273b3\") " Oct 01 15:19:16 crc kubenswrapper[4869]: I1001 15:19:16.640964 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/9bff5dbb-dd36-4646-aeaf-9ece110273b3-var-run\") pod \"9bff5dbb-dd36-4646-aeaf-9ece110273b3\" (UID: \"9bff5dbb-dd36-4646-aeaf-9ece110273b3\") " Oct 01 15:19:16 crc kubenswrapper[4869]: I1001 15:19:16.641081 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/9bff5dbb-dd36-4646-aeaf-9ece110273b3-var-run-ovn\") pod \"9bff5dbb-dd36-4646-aeaf-9ece110273b3\" (UID: \"9bff5dbb-dd36-4646-aeaf-9ece110273b3\") " Oct 01 15:19:16 crc kubenswrapper[4869]: I1001 15:19:16.641133 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-594d5\" (UniqueName: \"kubernetes.io/projected/9bff5dbb-dd36-4646-aeaf-9ece110273b3-kube-api-access-594d5\") pod \"9bff5dbb-dd36-4646-aeaf-9ece110273b3\" (UID: \"9bff5dbb-dd36-4646-aeaf-9ece110273b3\") " Oct 01 15:19:16 crc kubenswrapper[4869]: I1001 15:19:16.642110 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for 
volume "kubernetes.io/host-path/9bff5dbb-dd36-4646-aeaf-9ece110273b3-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "9bff5dbb-dd36-4646-aeaf-9ece110273b3" (UID: "9bff5dbb-dd36-4646-aeaf-9ece110273b3"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 15:19:16 crc kubenswrapper[4869]: I1001 15:19:16.642166 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9bff5dbb-dd36-4646-aeaf-9ece110273b3-var-run" (OuterVolumeSpecName: "var-run") pod "9bff5dbb-dd36-4646-aeaf-9ece110273b3" (UID: "9bff5dbb-dd36-4646-aeaf-9ece110273b3"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 15:19:16 crc kubenswrapper[4869]: I1001 15:19:16.642233 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9bff5dbb-dd36-4646-aeaf-9ece110273b3-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "9bff5dbb-dd36-4646-aeaf-9ece110273b3" (UID: "9bff5dbb-dd36-4646-aeaf-9ece110273b3"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 15:19:16 crc kubenswrapper[4869]: I1001 15:19:16.642669 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9bff5dbb-dd36-4646-aeaf-9ece110273b3-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "9bff5dbb-dd36-4646-aeaf-9ece110273b3" (UID: "9bff5dbb-dd36-4646-aeaf-9ece110273b3"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:19:16 crc kubenswrapper[4869]: I1001 15:19:16.642820 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9bff5dbb-dd36-4646-aeaf-9ece110273b3-scripts" (OuterVolumeSpecName: "scripts") pod "9bff5dbb-dd36-4646-aeaf-9ece110273b3" (UID: "9bff5dbb-dd36-4646-aeaf-9ece110273b3"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:19:16 crc kubenswrapper[4869]: I1001 15:19:16.657400 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9bff5dbb-dd36-4646-aeaf-9ece110273b3-kube-api-access-594d5" (OuterVolumeSpecName: "kube-api-access-594d5") pod "9bff5dbb-dd36-4646-aeaf-9ece110273b3" (UID: "9bff5dbb-dd36-4646-aeaf-9ece110273b3"). InnerVolumeSpecName "kube-api-access-594d5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:19:16 crc kubenswrapper[4869]: I1001 15:19:16.743208 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-594d5\" (UniqueName: \"kubernetes.io/projected/9bff5dbb-dd36-4646-aeaf-9ece110273b3-kube-api-access-594d5\") on node \"crc\" DevicePath \"\"" Oct 01 15:19:16 crc kubenswrapper[4869]: I1001 15:19:16.743247 4869 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9bff5dbb-dd36-4646-aeaf-9ece110273b3-scripts\") on node \"crc\" DevicePath \"\"" Oct 01 15:19:16 crc kubenswrapper[4869]: I1001 15:19:16.743272 4869 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/9bff5dbb-dd36-4646-aeaf-9ece110273b3-additional-scripts\") on node \"crc\" DevicePath \"\"" Oct 01 15:19:16 crc kubenswrapper[4869]: I1001 15:19:16.743285 4869 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/9bff5dbb-dd36-4646-aeaf-9ece110273b3-var-log-ovn\") on node \"crc\" DevicePath \"\"" Oct 01 15:19:16 crc kubenswrapper[4869]: I1001 15:19:16.743297 4869 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/9bff5dbb-dd36-4646-aeaf-9ece110273b3-var-run\") on node \"crc\" DevicePath \"\"" Oct 01 15:19:16 crc kubenswrapper[4869]: I1001 15:19:16.743307 4869 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/9bff5dbb-dd36-4646-aeaf-9ece110273b3-var-run-ovn\") on node \"crc\" DevicePath \"\"" Oct 01 15:19:17 crc kubenswrapper[4869]: I1001 15:19:17.186435 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-8xxqn-config-v4f9s" event={"ID":"9bff5dbb-dd36-4646-aeaf-9ece110273b3","Type":"ContainerDied","Data":"6cf57c31838deff37a66cff77457d13eede4b8694e942f21b50b97eb04f37bb9"} Oct 01 15:19:17 crc kubenswrapper[4869]: I1001 15:19:17.186481 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6cf57c31838deff37a66cff77457d13eede4b8694e942f21b50b97eb04f37bb9" Oct 01 15:19:17 crc kubenswrapper[4869]: I1001 15:19:17.186545 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-8xxqn-config-v4f9s" Oct 01 15:19:17 crc kubenswrapper[4869]: I1001 15:19:17.600643 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-8xxqn-config-v4f9s"] Oct 01 15:19:17 crc kubenswrapper[4869]: I1001 15:19:17.608917 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-8xxqn-config-v4f9s"] Oct 01 15:19:18 crc kubenswrapper[4869]: I1001 15:19:18.286453 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-8xxqn" Oct 01 15:19:18 crc kubenswrapper[4869]: I1001 15:19:18.613426 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Oct 01 15:19:18 crc kubenswrapper[4869]: I1001 15:19:18.871220 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-v7rjt"] Oct 01 15:19:18 crc kubenswrapper[4869]: E1001 15:19:18.871528 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a2d8b42f-6c11-4f05-baa2-5252184c1e92" containerName="mariadb-account-create" Oct 01 15:19:18 crc kubenswrapper[4869]: I1001 15:19:18.871540 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="a2d8b42f-6c11-4f05-baa2-5252184c1e92" containerName="mariadb-account-create" Oct 01 15:19:18 crc kubenswrapper[4869]: E1001 15:19:18.871548 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9bff5dbb-dd36-4646-aeaf-9ece110273b3" containerName="ovn-config" Oct 01 15:19:18 crc kubenswrapper[4869]: I1001 15:19:18.871554 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="9bff5dbb-dd36-4646-aeaf-9ece110273b3" containerName="ovn-config" Oct 01 15:19:18 crc kubenswrapper[4869]: E1001 15:19:18.871572 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f7670535-ca76-4d3e-8493-ca7341d7de3f" containerName="mariadb-account-create" Oct 01 15:19:18 crc kubenswrapper[4869]: I1001 15:19:18.871579 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="f7670535-ca76-4d3e-8493-ca7341d7de3f" containerName="mariadb-account-create" Oct 01 15:19:18 crc kubenswrapper[4869]: I1001 15:19:18.871726 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="a2d8b42f-6c11-4f05-baa2-5252184c1e92" containerName="mariadb-account-create" Oct 01 15:19:18 crc kubenswrapper[4869]: I1001 15:19:18.871744 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="f7670535-ca76-4d3e-8493-ca7341d7de3f" containerName="mariadb-account-create" Oct 01 15:19:18 crc kubenswrapper[4869]: I1001 15:19:18.871754 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="9bff5dbb-dd36-4646-aeaf-9ece110273b3" containerName="ovn-config" Oct 01 15:19:18 crc kubenswrapper[4869]: I1001 15:19:18.872219 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-v7rjt" Oct 01 15:19:18 crc kubenswrapper[4869]: I1001 15:19:18.887456 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-v7rjt"] Oct 01 15:19:18 crc kubenswrapper[4869]: I1001 15:19:18.916498 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Oct 01 15:19:18 crc kubenswrapper[4869]: I1001 15:19:18.974952 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-k6f4h"] Oct 01 15:19:18 crc kubenswrapper[4869]: I1001 15:19:18.976156 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-k6f4h" Oct 01 15:19:18 crc kubenswrapper[4869]: I1001 15:19:18.982777 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p72hl\" (UniqueName: \"kubernetes.io/projected/24526b59-4280-4dc6-9980-ea48d0143071-kube-api-access-p72hl\") pod \"cinder-db-create-v7rjt\" (UID: \"24526b59-4280-4dc6-9980-ea48d0143071\") " pod="openstack/cinder-db-create-v7rjt" Oct 01 15:19:18 crc kubenswrapper[4869]: I1001 15:19:18.988780 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-k6f4h"] Oct 01 15:19:19 crc kubenswrapper[4869]: I1001 15:19:19.084145 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s2qjb\" (UniqueName: \"kubernetes.io/projected/52104f6a-5c9e-46c9-9197-4015634558a6-kube-api-access-s2qjb\") pod \"barbican-db-create-k6f4h\" (UID: \"52104f6a-5c9e-46c9-9197-4015634558a6\") " pod="openstack/barbican-db-create-k6f4h" Oct 01 15:19:19 crc kubenswrapper[4869]: I1001 15:19:19.084325 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p72hl\" (UniqueName: \"kubernetes.io/projected/24526b59-4280-4dc6-9980-ea48d0143071-kube-api-access-p72hl\") pod \"cinder-db-create-v7rjt\" (UID: \"24526b59-4280-4dc6-9980-ea48d0143071\") " pod="openstack/cinder-db-create-v7rjt" Oct 01 15:19:19 crc kubenswrapper[4869]: I1001 15:19:19.102640 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p72hl\" (UniqueName: \"kubernetes.io/projected/24526b59-4280-4dc6-9980-ea48d0143071-kube-api-access-p72hl\") pod \"cinder-db-create-v7rjt\" (UID: \"24526b59-4280-4dc6-9980-ea48d0143071\") " pod="openstack/cinder-db-create-v7rjt" Oct 01 15:19:19 crc kubenswrapper[4869]: I1001 15:19:19.185483 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2qjb\" (UniqueName: \"kubernetes.io/projected/52104f6a-5c9e-46c9-9197-4015634558a6-kube-api-access-s2qjb\") pod \"barbican-db-create-k6f4h\" (UID: \"52104f6a-5c9e-46c9-9197-4015634558a6\") " pod="openstack/barbican-db-create-k6f4h" Oct 01 15:19:19 crc kubenswrapper[4869]: I1001 15:19:19.191168 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-v7rjt" Oct 01 15:19:19 crc kubenswrapper[4869]: I1001 15:19:19.207335 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2qjb\" (UniqueName: \"kubernetes.io/projected/52104f6a-5c9e-46c9-9197-4015634558a6-kube-api-access-s2qjb\") pod \"barbican-db-create-k6f4h\" (UID: \"52104f6a-5c9e-46c9-9197-4015634558a6\") " pod="openstack/barbican-db-create-k6f4h" Oct 01 15:19:19 crc kubenswrapper[4869]: I1001 15:19:19.214531 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-sdgm5"] Oct 01 15:19:19 crc kubenswrapper[4869]: I1001 15:19:19.215485 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-sdgm5" Oct 01 15:19:19 crc kubenswrapper[4869]: I1001 15:19:19.224739 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-sdgm5"] Oct 01 15:19:19 crc kubenswrapper[4869]: I1001 15:19:19.305781 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-k6f4h" Oct 01 15:19:19 crc kubenswrapper[4869]: I1001 15:19:19.332365 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-j277p"] Oct 01 15:19:19 crc kubenswrapper[4869]: I1001 15:19:19.333284 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-j277p" Oct 01 15:19:19 crc kubenswrapper[4869]: I1001 15:19:19.335642 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-vhhr5" Oct 01 15:19:19 crc kubenswrapper[4869]: I1001 15:19:19.335832 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Oct 01 15:19:19 crc kubenswrapper[4869]: I1001 15:19:19.335978 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Oct 01 15:19:19 crc kubenswrapper[4869]: I1001 15:19:19.336113 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Oct 01 15:19:19 crc kubenswrapper[4869]: I1001 15:19:19.348906 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-j277p"] Oct 01 15:19:19 crc kubenswrapper[4869]: I1001 15:19:19.388541 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sm92n\" (UniqueName: \"kubernetes.io/projected/6ba3f80b-83bc-4a34-9c76-44fafb693520-kube-api-access-sm92n\") pod \"neutron-db-create-sdgm5\" (UID: \"6ba3f80b-83bc-4a34-9c76-44fafb693520\") " pod="openstack/neutron-db-create-sdgm5" Oct 01 15:19:19 crc kubenswrapper[4869]: I1001 15:19:19.489661 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fpcgj\" (UniqueName: \"kubernetes.io/projected/92372d1e-b67d-4615-9978-946caec18e59-kube-api-access-fpcgj\") pod \"keystone-db-sync-j277p\" (UID: \"92372d1e-b67d-4615-9978-946caec18e59\") " pod="openstack/keystone-db-sync-j277p" Oct 01 15:19:19 crc kubenswrapper[4869]: I1001 15:19:19.489727 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/92372d1e-b67d-4615-9978-946caec18e59-combined-ca-bundle\") pod \"keystone-db-sync-j277p\" (UID: \"92372d1e-b67d-4615-9978-946caec18e59\") " pod="openstack/keystone-db-sync-j277p" Oct 01 15:19:19 crc kubenswrapper[4869]: I1001 15:19:19.490110 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/92372d1e-b67d-4615-9978-946caec18e59-config-data\") pod \"keystone-db-sync-j277p\" (UID: \"92372d1e-b67d-4615-9978-946caec18e59\") " pod="openstack/keystone-db-sync-j277p" Oct 01 15:19:19 crc kubenswrapper[4869]: I1001 15:19:19.490198 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sm92n\" (UniqueName: \"kubernetes.io/projected/6ba3f80b-83bc-4a34-9c76-44fafb693520-kube-api-access-sm92n\") pod \"neutron-db-create-sdgm5\" (UID: \"6ba3f80b-83bc-4a34-9c76-44fafb693520\") " pod="openstack/neutron-db-create-sdgm5" Oct 01 15:19:19 crc kubenswrapper[4869]: I1001 15:19:19.527193 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sm92n\" (UniqueName: \"kubernetes.io/projected/6ba3f80b-83bc-4a34-9c76-44fafb693520-kube-api-access-sm92n\") pod \"neutron-db-create-sdgm5\" (UID: 
\"6ba3f80b-83bc-4a34-9c76-44fafb693520\") " pod="openstack/neutron-db-create-sdgm5" Oct 01 15:19:19 crc kubenswrapper[4869]: I1001 15:19:19.591347 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fpcgj\" (UniqueName: \"kubernetes.io/projected/92372d1e-b67d-4615-9978-946caec18e59-kube-api-access-fpcgj\") pod \"keystone-db-sync-j277p\" (UID: \"92372d1e-b67d-4615-9978-946caec18e59\") " pod="openstack/keystone-db-sync-j277p" Oct 01 15:19:19 crc kubenswrapper[4869]: I1001 15:19:19.591418 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/92372d1e-b67d-4615-9978-946caec18e59-combined-ca-bundle\") pod \"keystone-db-sync-j277p\" (UID: \"92372d1e-b67d-4615-9978-946caec18e59\") " pod="openstack/keystone-db-sync-j277p" Oct 01 15:19:19 crc kubenswrapper[4869]: I1001 15:19:19.591556 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/92372d1e-b67d-4615-9978-946caec18e59-config-data\") pod \"keystone-db-sync-j277p\" (UID: \"92372d1e-b67d-4615-9978-946caec18e59\") " pod="openstack/keystone-db-sync-j277p" Oct 01 15:19:19 crc kubenswrapper[4869]: I1001 15:19:19.596342 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/92372d1e-b67d-4615-9978-946caec18e59-combined-ca-bundle\") pod \"keystone-db-sync-j277p\" (UID: \"92372d1e-b67d-4615-9978-946caec18e59\") " pod="openstack/keystone-db-sync-j277p" Oct 01 15:19:19 crc kubenswrapper[4869]: I1001 15:19:19.596629 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/92372d1e-b67d-4615-9978-946caec18e59-config-data\") pod \"keystone-db-sync-j277p\" (UID: \"92372d1e-b67d-4615-9978-946caec18e59\") " pod="openstack/keystone-db-sync-j277p" Oct 01 15:19:19 crc kubenswrapper[4869]: I1001 15:19:19.607519 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9bff5dbb-dd36-4646-aeaf-9ece110273b3" path="/var/lib/kubelet/pods/9bff5dbb-dd36-4646-aeaf-9ece110273b3/volumes" Oct 01 15:19:19 crc kubenswrapper[4869]: I1001 15:19:19.610920 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fpcgj\" (UniqueName: \"kubernetes.io/projected/92372d1e-b67d-4615-9978-946caec18e59-kube-api-access-fpcgj\") pod \"keystone-db-sync-j277p\" (UID: \"92372d1e-b67d-4615-9978-946caec18e59\") " pod="openstack/keystone-db-sync-j277p" Oct 01 15:19:19 crc kubenswrapper[4869]: I1001 15:19:19.632889 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-sdgm5" Oct 01 15:19:19 crc kubenswrapper[4869]: I1001 15:19:19.658027 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-j277p" Oct 01 15:19:19 crc kubenswrapper[4869]: I1001 15:19:19.665765 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-v7rjt"] Oct 01 15:19:26 crc kubenswrapper[4869]: I1001 15:19:26.254378 4869 generic.go:334] "Generic (PLEG): container finished" podID="24526b59-4280-4dc6-9980-ea48d0143071" containerID="768b2a45e689a166340d2874079e94b55e0e86c9218ffa627b0f6a909ef9531c" exitCode=0 Oct 01 15:19:26 crc kubenswrapper[4869]: I1001 15:19:26.254578 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-v7rjt" event={"ID":"24526b59-4280-4dc6-9980-ea48d0143071","Type":"ContainerDied","Data":"768b2a45e689a166340d2874079e94b55e0e86c9218ffa627b0f6a909ef9531c"} Oct 01 15:19:26 crc kubenswrapper[4869]: I1001 15:19:26.254924 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-v7rjt" event={"ID":"24526b59-4280-4dc6-9980-ea48d0143071","Type":"ContainerStarted","Data":"d774d373c39e8bc047a9f91e99ba61aec6855d0b8e10db032329081d67679730"} Oct 01 15:19:26 crc kubenswrapper[4869]: I1001 15:19:26.370988 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-k6f4h"] Oct 01 15:19:26 crc kubenswrapper[4869]: I1001 15:19:26.431208 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-j277p"] Oct 01 15:19:26 crc kubenswrapper[4869]: W1001 15:19:26.469596 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6ba3f80b_83bc_4a34_9c76_44fafb693520.slice/crio-d3471e21fae2c8c4144eb5b4688f24373140d87128da256a38609c5c535df613 WatchSource:0}: Error finding container d3471e21fae2c8c4144eb5b4688f24373140d87128da256a38609c5c535df613: Status 404 returned error can't find the container with id d3471e21fae2c8c4144eb5b4688f24373140d87128da256a38609c5c535df613 Oct 01 15:19:26 crc kubenswrapper[4869]: I1001 15:19:26.470133 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-sdgm5"] Oct 01 15:19:27 crc kubenswrapper[4869]: I1001 15:19:27.277442 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-d6v2s" event={"ID":"16eefc1b-2890-4b09-8f2b-0febecad6523","Type":"ContainerStarted","Data":"aec0744a0a58cab7b01468080828f89287315954d0b524ca5cbd2d95f9cdb650"} Oct 01 15:19:27 crc kubenswrapper[4869]: I1001 15:19:27.279759 4869 generic.go:334] "Generic (PLEG): container finished" podID="52104f6a-5c9e-46c9-9197-4015634558a6" containerID="1726125b1c080f02b2db667cf3b002bb0c5c61a58e129ca346f4df4c00e0391a" exitCode=0 Oct 01 15:19:27 crc kubenswrapper[4869]: I1001 15:19:27.279856 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-k6f4h" event={"ID":"52104f6a-5c9e-46c9-9197-4015634558a6","Type":"ContainerDied","Data":"1726125b1c080f02b2db667cf3b002bb0c5c61a58e129ca346f4df4c00e0391a"} Oct 01 15:19:27 crc kubenswrapper[4869]: I1001 15:19:27.280010 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-k6f4h" event={"ID":"52104f6a-5c9e-46c9-9197-4015634558a6","Type":"ContainerStarted","Data":"5e2ee09c0567c812af4cb91b5db2a0ceb60f3f6e5af66e7598c4f3819408e3a1"} Oct 01 15:19:27 crc kubenswrapper[4869]: I1001 15:19:27.284003 4869 generic.go:334] "Generic (PLEG): container finished" podID="6ba3f80b-83bc-4a34-9c76-44fafb693520" containerID="0e2955c0044e102e705874bc0b017f85080ab523359c3e8639eb1f5b32884a8e" exitCode=0 Oct 
01 15:19:27 crc kubenswrapper[4869]: I1001 15:19:27.284078 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-sdgm5" event={"ID":"6ba3f80b-83bc-4a34-9c76-44fafb693520","Type":"ContainerDied","Data":"0e2955c0044e102e705874bc0b017f85080ab523359c3e8639eb1f5b32884a8e"} Oct 01 15:19:27 crc kubenswrapper[4869]: I1001 15:19:27.284109 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-sdgm5" event={"ID":"6ba3f80b-83bc-4a34-9c76-44fafb693520","Type":"ContainerStarted","Data":"d3471e21fae2c8c4144eb5b4688f24373140d87128da256a38609c5c535df613"} Oct 01 15:19:27 crc kubenswrapper[4869]: I1001 15:19:27.287024 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-j277p" event={"ID":"92372d1e-b67d-4615-9978-946caec18e59","Type":"ContainerStarted","Data":"614bfe137d01ec075f1a6ccbc70ac5f054227cbb472ec6c2972eb17998cfc5d7"} Oct 01 15:19:27 crc kubenswrapper[4869]: I1001 15:19:27.294961 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-d6v2s" podStartSLOduration=3.046948161 podStartE2EDuration="15.294915738s" podCreationTimestamp="2025-10-01 15:19:12 +0000 UTC" firstStartedPulling="2025-10-01 15:19:13.781443049 +0000 UTC m=+862.928286165" lastFinishedPulling="2025-10-01 15:19:26.029410616 +0000 UTC m=+875.176253742" observedRunningTime="2025-10-01 15:19:27.29459381 +0000 UTC m=+876.441436926" watchObservedRunningTime="2025-10-01 15:19:27.294915738 +0000 UTC m=+876.441758874" Oct 01 15:19:27 crc kubenswrapper[4869]: I1001 15:19:27.598957 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-v7rjt" Oct 01 15:19:27 crc kubenswrapper[4869]: I1001 15:19:27.744547 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p72hl\" (UniqueName: \"kubernetes.io/projected/24526b59-4280-4dc6-9980-ea48d0143071-kube-api-access-p72hl\") pod \"24526b59-4280-4dc6-9980-ea48d0143071\" (UID: \"24526b59-4280-4dc6-9980-ea48d0143071\") " Oct 01 15:19:27 crc kubenswrapper[4869]: I1001 15:19:27.751439 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/24526b59-4280-4dc6-9980-ea48d0143071-kube-api-access-p72hl" (OuterVolumeSpecName: "kube-api-access-p72hl") pod "24526b59-4280-4dc6-9980-ea48d0143071" (UID: "24526b59-4280-4dc6-9980-ea48d0143071"). InnerVolumeSpecName "kube-api-access-p72hl". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:19:27 crc kubenswrapper[4869]: I1001 15:19:27.846519 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p72hl\" (UniqueName: \"kubernetes.io/projected/24526b59-4280-4dc6-9980-ea48d0143071-kube-api-access-p72hl\") on node \"crc\" DevicePath \"\"" Oct 01 15:19:28 crc kubenswrapper[4869]: I1001 15:19:28.298687 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-v7rjt" Oct 01 15:19:28 crc kubenswrapper[4869]: I1001 15:19:28.300448 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-v7rjt" event={"ID":"24526b59-4280-4dc6-9980-ea48d0143071","Type":"ContainerDied","Data":"d774d373c39e8bc047a9f91e99ba61aec6855d0b8e10db032329081d67679730"} Oct 01 15:19:28 crc kubenswrapper[4869]: I1001 15:19:28.300535 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d774d373c39e8bc047a9f91e99ba61aec6855d0b8e10db032329081d67679730" Oct 01 15:19:29 crc kubenswrapper[4869]: I1001 15:19:29.008863 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-0946-account-create-g5vxh"] Oct 01 15:19:29 crc kubenswrapper[4869]: E1001 15:19:29.009229 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="24526b59-4280-4dc6-9980-ea48d0143071" containerName="mariadb-database-create" Oct 01 15:19:29 crc kubenswrapper[4869]: I1001 15:19:29.009243 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="24526b59-4280-4dc6-9980-ea48d0143071" containerName="mariadb-database-create" Oct 01 15:19:29 crc kubenswrapper[4869]: I1001 15:19:29.009807 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="24526b59-4280-4dc6-9980-ea48d0143071" containerName="mariadb-database-create" Oct 01 15:19:29 crc kubenswrapper[4869]: I1001 15:19:29.012909 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-0946-account-create-g5vxh" Oct 01 15:19:29 crc kubenswrapper[4869]: I1001 15:19:29.015384 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Oct 01 15:19:29 crc kubenswrapper[4869]: I1001 15:19:29.023154 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-0946-account-create-g5vxh"] Oct 01 15:19:29 crc kubenswrapper[4869]: I1001 15:19:29.170046 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wchjz\" (UniqueName: \"kubernetes.io/projected/4937bc6d-5b93-47a8-bbe6-957b30612d76-kube-api-access-wchjz\") pod \"cinder-0946-account-create-g5vxh\" (UID: \"4937bc6d-5b93-47a8-bbe6-957b30612d76\") " pod="openstack/cinder-0946-account-create-g5vxh" Oct 01 15:19:29 crc kubenswrapper[4869]: I1001 15:19:29.271178 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wchjz\" (UniqueName: \"kubernetes.io/projected/4937bc6d-5b93-47a8-bbe6-957b30612d76-kube-api-access-wchjz\") pod \"cinder-0946-account-create-g5vxh\" (UID: \"4937bc6d-5b93-47a8-bbe6-957b30612d76\") " pod="openstack/cinder-0946-account-create-g5vxh" Oct 01 15:19:29 crc kubenswrapper[4869]: I1001 15:19:29.309966 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wchjz\" (UniqueName: \"kubernetes.io/projected/4937bc6d-5b93-47a8-bbe6-957b30612d76-kube-api-access-wchjz\") pod \"cinder-0946-account-create-g5vxh\" (UID: \"4937bc6d-5b93-47a8-bbe6-957b30612d76\") " pod="openstack/cinder-0946-account-create-g5vxh" Oct 01 15:19:29 crc kubenswrapper[4869]: I1001 15:19:29.332354 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-0946-account-create-g5vxh" Oct 01 15:19:30 crc kubenswrapper[4869]: I1001 15:19:30.893386 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-k6f4h" Oct 01 15:19:30 crc kubenswrapper[4869]: I1001 15:19:30.905908 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-sdgm5" Oct 01 15:19:30 crc kubenswrapper[4869]: I1001 15:19:30.929421 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-0946-account-create-g5vxh"] Oct 01 15:19:30 crc kubenswrapper[4869]: W1001 15:19:30.945475 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4937bc6d_5b93_47a8_bbe6_957b30612d76.slice/crio-ce0a5f2df35b7ac4b522fb0d228529d5eec638a37193d697cd70b1d32a024890 WatchSource:0}: Error finding container ce0a5f2df35b7ac4b522fb0d228529d5eec638a37193d697cd70b1d32a024890: Status 404 returned error can't find the container with id ce0a5f2df35b7ac4b522fb0d228529d5eec638a37193d697cd70b1d32a024890 Oct 01 15:19:31 crc kubenswrapper[4869]: I1001 15:19:31.002619 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s2qjb\" (UniqueName: \"kubernetes.io/projected/52104f6a-5c9e-46c9-9197-4015634558a6-kube-api-access-s2qjb\") pod \"52104f6a-5c9e-46c9-9197-4015634558a6\" (UID: \"52104f6a-5c9e-46c9-9197-4015634558a6\") " Oct 01 15:19:31 crc kubenswrapper[4869]: I1001 15:19:31.002674 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sm92n\" (UniqueName: \"kubernetes.io/projected/6ba3f80b-83bc-4a34-9c76-44fafb693520-kube-api-access-sm92n\") pod \"6ba3f80b-83bc-4a34-9c76-44fafb693520\" (UID: \"6ba3f80b-83bc-4a34-9c76-44fafb693520\") " Oct 01 15:19:31 crc kubenswrapper[4869]: I1001 15:19:31.006550 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/52104f6a-5c9e-46c9-9197-4015634558a6-kube-api-access-s2qjb" (OuterVolumeSpecName: "kube-api-access-s2qjb") pod "52104f6a-5c9e-46c9-9197-4015634558a6" (UID: "52104f6a-5c9e-46c9-9197-4015634558a6"). InnerVolumeSpecName "kube-api-access-s2qjb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:19:31 crc kubenswrapper[4869]: I1001 15:19:31.007023 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ba3f80b-83bc-4a34-9c76-44fafb693520-kube-api-access-sm92n" (OuterVolumeSpecName: "kube-api-access-sm92n") pod "6ba3f80b-83bc-4a34-9c76-44fafb693520" (UID: "6ba3f80b-83bc-4a34-9c76-44fafb693520"). InnerVolumeSpecName "kube-api-access-sm92n". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:19:31 crc kubenswrapper[4869]: I1001 15:19:31.104961 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s2qjb\" (UniqueName: \"kubernetes.io/projected/52104f6a-5c9e-46c9-9197-4015634558a6-kube-api-access-s2qjb\") on node \"crc\" DevicePath \"\"" Oct 01 15:19:31 crc kubenswrapper[4869]: I1001 15:19:31.104997 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sm92n\" (UniqueName: \"kubernetes.io/projected/6ba3f80b-83bc-4a34-9c76-44fafb693520-kube-api-access-sm92n\") on node \"crc\" DevicePath \"\"" Oct 01 15:19:31 crc kubenswrapper[4869]: I1001 15:19:31.324048 4869 generic.go:334] "Generic (PLEG): container finished" podID="4937bc6d-5b93-47a8-bbe6-957b30612d76" containerID="8cf151c7d788727aca7efd8e2402e6c98a597180d730fc0d23b4c172fa3b3047" exitCode=0 Oct 01 15:19:31 crc kubenswrapper[4869]: I1001 15:19:31.324111 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-0946-account-create-g5vxh" event={"ID":"4937bc6d-5b93-47a8-bbe6-957b30612d76","Type":"ContainerDied","Data":"8cf151c7d788727aca7efd8e2402e6c98a597180d730fc0d23b4c172fa3b3047"} Oct 01 15:19:31 crc kubenswrapper[4869]: I1001 15:19:31.324153 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-0946-account-create-g5vxh" event={"ID":"4937bc6d-5b93-47a8-bbe6-957b30612d76","Type":"ContainerStarted","Data":"ce0a5f2df35b7ac4b522fb0d228529d5eec638a37193d697cd70b1d32a024890"} Oct 01 15:19:31 crc kubenswrapper[4869]: I1001 15:19:31.326561 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-k6f4h" event={"ID":"52104f6a-5c9e-46c9-9197-4015634558a6","Type":"ContainerDied","Data":"5e2ee09c0567c812af4cb91b5db2a0ceb60f3f6e5af66e7598c4f3819408e3a1"} Oct 01 15:19:31 crc kubenswrapper[4869]: I1001 15:19:31.326600 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-k6f4h" Oct 01 15:19:31 crc kubenswrapper[4869]: I1001 15:19:31.326608 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5e2ee09c0567c812af4cb91b5db2a0ceb60f3f6e5af66e7598c4f3819408e3a1" Oct 01 15:19:31 crc kubenswrapper[4869]: I1001 15:19:31.328891 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-sdgm5" Oct 01 15:19:31 crc kubenswrapper[4869]: I1001 15:19:31.328908 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-sdgm5" event={"ID":"6ba3f80b-83bc-4a34-9c76-44fafb693520","Type":"ContainerDied","Data":"d3471e21fae2c8c4144eb5b4688f24373140d87128da256a38609c5c535df613"} Oct 01 15:19:31 crc kubenswrapper[4869]: I1001 15:19:31.328962 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d3471e21fae2c8c4144eb5b4688f24373140d87128da256a38609c5c535df613" Oct 01 15:19:31 crc kubenswrapper[4869]: I1001 15:19:31.331406 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-j277p" event={"ID":"92372d1e-b67d-4615-9978-946caec18e59","Type":"ContainerStarted","Data":"0eda98a080d6660e765a3fc5cdd826413f49f5b145e7c422de30995e88bcac0c"} Oct 01 15:19:31 crc kubenswrapper[4869]: I1001 15:19:31.370448 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-j277p" podStartSLOduration=8.104718814 podStartE2EDuration="12.370425923s" podCreationTimestamp="2025-10-01 15:19:19 +0000 UTC" firstStartedPulling="2025-10-01 15:19:26.461013296 +0000 UTC m=+875.607856422" lastFinishedPulling="2025-10-01 15:19:30.726720415 +0000 UTC m=+879.873563531" observedRunningTime="2025-10-01 15:19:31.362472302 +0000 UTC m=+880.509315458" watchObservedRunningTime="2025-10-01 15:19:31.370425923 +0000 UTC m=+880.517269059" Oct 01 15:19:32 crc kubenswrapper[4869]: I1001 15:19:32.648601 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-0946-account-create-g5vxh" Oct 01 15:19:32 crc kubenswrapper[4869]: I1001 15:19:32.737216 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wchjz\" (UniqueName: \"kubernetes.io/projected/4937bc6d-5b93-47a8-bbe6-957b30612d76-kube-api-access-wchjz\") pod \"4937bc6d-5b93-47a8-bbe6-957b30612d76\" (UID: \"4937bc6d-5b93-47a8-bbe6-957b30612d76\") " Oct 01 15:19:32 crc kubenswrapper[4869]: I1001 15:19:32.744685 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4937bc6d-5b93-47a8-bbe6-957b30612d76-kube-api-access-wchjz" (OuterVolumeSpecName: "kube-api-access-wchjz") pod "4937bc6d-5b93-47a8-bbe6-957b30612d76" (UID: "4937bc6d-5b93-47a8-bbe6-957b30612d76"). InnerVolumeSpecName "kube-api-access-wchjz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:19:32 crc kubenswrapper[4869]: I1001 15:19:32.839718 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wchjz\" (UniqueName: \"kubernetes.io/projected/4937bc6d-5b93-47a8-bbe6-957b30612d76-kube-api-access-wchjz\") on node \"crc\" DevicePath \"\"" Oct 01 15:19:33 crc kubenswrapper[4869]: I1001 15:19:33.351523 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-0946-account-create-g5vxh" Oct 01 15:19:33 crc kubenswrapper[4869]: I1001 15:19:33.351531 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-0946-account-create-g5vxh" event={"ID":"4937bc6d-5b93-47a8-bbe6-957b30612d76","Type":"ContainerDied","Data":"ce0a5f2df35b7ac4b522fb0d228529d5eec638a37193d697cd70b1d32a024890"} Oct 01 15:19:33 crc kubenswrapper[4869]: I1001 15:19:33.351571 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ce0a5f2df35b7ac4b522fb0d228529d5eec638a37193d697cd70b1d32a024890" Oct 01 15:19:33 crc kubenswrapper[4869]: I1001 15:19:33.354501 4869 generic.go:334] "Generic (PLEG): container finished" podID="16eefc1b-2890-4b09-8f2b-0febecad6523" containerID="aec0744a0a58cab7b01468080828f89287315954d0b524ca5cbd2d95f9cdb650" exitCode=0 Oct 01 15:19:33 crc kubenswrapper[4869]: I1001 15:19:33.354551 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-d6v2s" event={"ID":"16eefc1b-2890-4b09-8f2b-0febecad6523","Type":"ContainerDied","Data":"aec0744a0a58cab7b01468080828f89287315954d0b524ca5cbd2d95f9cdb650"} Oct 01 15:19:34 crc kubenswrapper[4869]: I1001 15:19:34.365722 4869 generic.go:334] "Generic (PLEG): container finished" podID="92372d1e-b67d-4615-9978-946caec18e59" containerID="0eda98a080d6660e765a3fc5cdd826413f49f5b145e7c422de30995e88bcac0c" exitCode=0 Oct 01 15:19:34 crc kubenswrapper[4869]: I1001 15:19:34.365818 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-j277p" event={"ID":"92372d1e-b67d-4615-9978-946caec18e59","Type":"ContainerDied","Data":"0eda98a080d6660e765a3fc5cdd826413f49f5b145e7c422de30995e88bcac0c"} Oct 01 15:19:34 crc kubenswrapper[4869]: I1001 15:19:34.737824 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-d6v2s" Oct 01 15:19:34 crc kubenswrapper[4869]: I1001 15:19:34.869994 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16eefc1b-2890-4b09-8f2b-0febecad6523-combined-ca-bundle\") pod \"16eefc1b-2890-4b09-8f2b-0febecad6523\" (UID: \"16eefc1b-2890-4b09-8f2b-0febecad6523\") " Oct 01 15:19:34 crc kubenswrapper[4869]: I1001 15:19:34.870165 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/16eefc1b-2890-4b09-8f2b-0febecad6523-config-data\") pod \"16eefc1b-2890-4b09-8f2b-0febecad6523\" (UID: \"16eefc1b-2890-4b09-8f2b-0febecad6523\") " Oct 01 15:19:34 crc kubenswrapper[4869]: I1001 15:19:34.870247 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/16eefc1b-2890-4b09-8f2b-0febecad6523-db-sync-config-data\") pod \"16eefc1b-2890-4b09-8f2b-0febecad6523\" (UID: \"16eefc1b-2890-4b09-8f2b-0febecad6523\") " Oct 01 15:19:34 crc kubenswrapper[4869]: I1001 15:19:34.870306 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5qfhq\" (UniqueName: \"kubernetes.io/projected/16eefc1b-2890-4b09-8f2b-0febecad6523-kube-api-access-5qfhq\") pod \"16eefc1b-2890-4b09-8f2b-0febecad6523\" (UID: \"16eefc1b-2890-4b09-8f2b-0febecad6523\") " Oct 01 15:19:34 crc kubenswrapper[4869]: I1001 15:19:34.875846 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/16eefc1b-2890-4b09-8f2b-0febecad6523-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "16eefc1b-2890-4b09-8f2b-0febecad6523" (UID: "16eefc1b-2890-4b09-8f2b-0febecad6523"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:19:34 crc kubenswrapper[4869]: I1001 15:19:34.876804 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/16eefc1b-2890-4b09-8f2b-0febecad6523-kube-api-access-5qfhq" (OuterVolumeSpecName: "kube-api-access-5qfhq") pod "16eefc1b-2890-4b09-8f2b-0febecad6523" (UID: "16eefc1b-2890-4b09-8f2b-0febecad6523"). InnerVolumeSpecName "kube-api-access-5qfhq". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:19:34 crc kubenswrapper[4869]: I1001 15:19:34.893215 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/16eefc1b-2890-4b09-8f2b-0febecad6523-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "16eefc1b-2890-4b09-8f2b-0febecad6523" (UID: "16eefc1b-2890-4b09-8f2b-0febecad6523"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:19:34 crc kubenswrapper[4869]: I1001 15:19:34.920346 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/16eefc1b-2890-4b09-8f2b-0febecad6523-config-data" (OuterVolumeSpecName: "config-data") pod "16eefc1b-2890-4b09-8f2b-0febecad6523" (UID: "16eefc1b-2890-4b09-8f2b-0febecad6523"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:19:34 crc kubenswrapper[4869]: I1001 15:19:34.972534 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16eefc1b-2890-4b09-8f2b-0febecad6523-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 15:19:34 crc kubenswrapper[4869]: I1001 15:19:34.972567 4869 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/16eefc1b-2890-4b09-8f2b-0febecad6523-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 15:19:34 crc kubenswrapper[4869]: I1001 15:19:34.972576 4869 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/16eefc1b-2890-4b09-8f2b-0febecad6523-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 15:19:34 crc kubenswrapper[4869]: I1001 15:19:34.972585 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5qfhq\" (UniqueName: \"kubernetes.io/projected/16eefc1b-2890-4b09-8f2b-0febecad6523-kube-api-access-5qfhq\") on node \"crc\" DevicePath \"\"" Oct 01 15:19:35 crc kubenswrapper[4869]: I1001 15:19:35.377658 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-d6v2s" Oct 01 15:19:35 crc kubenswrapper[4869]: I1001 15:19:35.377652 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-d6v2s" event={"ID":"16eefc1b-2890-4b09-8f2b-0febecad6523","Type":"ContainerDied","Data":"61be75e1f41e8c1f9d35095f918b133865a147be815917224cfdb06d66e0cf52"} Oct 01 15:19:35 crc kubenswrapper[4869]: I1001 15:19:35.377710 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="61be75e1f41e8c1f9d35095f918b133865a147be815917224cfdb06d66e0cf52" Oct 01 15:19:35 crc kubenswrapper[4869]: E1001 15:19:35.465353 4869 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod16eefc1b_2890_4b09_8f2b_0febecad6523.slice\": RecentStats: unable to find data in memory cache]" Oct 01 15:19:35 crc kubenswrapper[4869]: I1001 15:19:35.767693 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-j277p" Oct 01 15:19:35 crc kubenswrapper[4869]: I1001 15:19:35.809042 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7565f89d5c-ldvdn"] Oct 01 15:19:35 crc kubenswrapper[4869]: E1001 15:19:35.809363 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="92372d1e-b67d-4615-9978-946caec18e59" containerName="keystone-db-sync" Oct 01 15:19:35 crc kubenswrapper[4869]: I1001 15:19:35.809377 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="92372d1e-b67d-4615-9978-946caec18e59" containerName="keystone-db-sync" Oct 01 15:19:35 crc kubenswrapper[4869]: E1001 15:19:35.809389 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4937bc6d-5b93-47a8-bbe6-957b30612d76" containerName="mariadb-account-create" Oct 01 15:19:35 crc kubenswrapper[4869]: I1001 15:19:35.809395 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="4937bc6d-5b93-47a8-bbe6-957b30612d76" containerName="mariadb-account-create" Oct 01 15:19:35 crc kubenswrapper[4869]: E1001 15:19:35.809404 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="52104f6a-5c9e-46c9-9197-4015634558a6" containerName="mariadb-database-create" Oct 01 15:19:35 crc kubenswrapper[4869]: I1001 15:19:35.809410 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="52104f6a-5c9e-46c9-9197-4015634558a6" containerName="mariadb-database-create" Oct 01 15:19:35 crc kubenswrapper[4869]: E1001 15:19:35.809420 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="16eefc1b-2890-4b09-8f2b-0febecad6523" containerName="glance-db-sync" Oct 01 15:19:35 crc kubenswrapper[4869]: I1001 15:19:35.809425 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="16eefc1b-2890-4b09-8f2b-0febecad6523" containerName="glance-db-sync" Oct 01 15:19:35 crc kubenswrapper[4869]: E1001 15:19:35.809438 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6ba3f80b-83bc-4a34-9c76-44fafb693520" containerName="mariadb-database-create" Oct 01 15:19:35 crc kubenswrapper[4869]: I1001 15:19:35.809446 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="6ba3f80b-83bc-4a34-9c76-44fafb693520" containerName="mariadb-database-create" Oct 01 15:19:35 crc kubenswrapper[4869]: I1001 15:19:35.809593 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="92372d1e-b67d-4615-9978-946caec18e59" containerName="keystone-db-sync" Oct 01 15:19:35 crc kubenswrapper[4869]: I1001 15:19:35.809603 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="4937bc6d-5b93-47a8-bbe6-957b30612d76" containerName="mariadb-account-create" Oct 01 15:19:35 crc kubenswrapper[4869]: I1001 15:19:35.809611 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="52104f6a-5c9e-46c9-9197-4015634558a6" containerName="mariadb-database-create" Oct 01 15:19:35 crc kubenswrapper[4869]: I1001 15:19:35.809623 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="6ba3f80b-83bc-4a34-9c76-44fafb693520" containerName="mariadb-database-create" Oct 01 15:19:35 crc kubenswrapper[4869]: I1001 15:19:35.809635 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="16eefc1b-2890-4b09-8f2b-0febecad6523" containerName="glance-db-sync" Oct 01 15:19:35 crc kubenswrapper[4869]: I1001 15:19:35.810403 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7565f89d5c-ldvdn" Oct 01 15:19:35 crc kubenswrapper[4869]: I1001 15:19:35.820688 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7565f89d5c-ldvdn"] Oct 01 15:19:35 crc kubenswrapper[4869]: I1001 15:19:35.897390 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fpcgj\" (UniqueName: \"kubernetes.io/projected/92372d1e-b67d-4615-9978-946caec18e59-kube-api-access-fpcgj\") pod \"92372d1e-b67d-4615-9978-946caec18e59\" (UID: \"92372d1e-b67d-4615-9978-946caec18e59\") " Oct 01 15:19:35 crc kubenswrapper[4869]: I1001 15:19:35.897526 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/92372d1e-b67d-4615-9978-946caec18e59-config-data\") pod \"92372d1e-b67d-4615-9978-946caec18e59\" (UID: \"92372d1e-b67d-4615-9978-946caec18e59\") " Oct 01 15:19:35 crc kubenswrapper[4869]: I1001 15:19:35.897634 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/92372d1e-b67d-4615-9978-946caec18e59-combined-ca-bundle\") pod \"92372d1e-b67d-4615-9978-946caec18e59\" (UID: \"92372d1e-b67d-4615-9978-946caec18e59\") " Oct 01 15:19:35 crc kubenswrapper[4869]: I1001 15:19:35.897847 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s79rb\" (UniqueName: \"kubernetes.io/projected/2dc1d5e4-3aad-4bf9-b27b-e2d35547583f-kube-api-access-s79rb\") pod \"dnsmasq-dns-7565f89d5c-ldvdn\" (UID: \"2dc1d5e4-3aad-4bf9-b27b-e2d35547583f\") " pod="openstack/dnsmasq-dns-7565f89d5c-ldvdn" Oct 01 15:19:35 crc kubenswrapper[4869]: I1001 15:19:35.897898 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2dc1d5e4-3aad-4bf9-b27b-e2d35547583f-config\") pod \"dnsmasq-dns-7565f89d5c-ldvdn\" (UID: \"2dc1d5e4-3aad-4bf9-b27b-e2d35547583f\") " pod="openstack/dnsmasq-dns-7565f89d5c-ldvdn" Oct 01 15:19:35 crc kubenswrapper[4869]: I1001 15:19:35.897957 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2dc1d5e4-3aad-4bf9-b27b-e2d35547583f-ovsdbserver-sb\") pod \"dnsmasq-dns-7565f89d5c-ldvdn\" (UID: \"2dc1d5e4-3aad-4bf9-b27b-e2d35547583f\") " pod="openstack/dnsmasq-dns-7565f89d5c-ldvdn" Oct 01 15:19:35 crc kubenswrapper[4869]: I1001 15:19:35.897986 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2dc1d5e4-3aad-4bf9-b27b-e2d35547583f-dns-svc\") pod \"dnsmasq-dns-7565f89d5c-ldvdn\" (UID: \"2dc1d5e4-3aad-4bf9-b27b-e2d35547583f\") " pod="openstack/dnsmasq-dns-7565f89d5c-ldvdn" Oct 01 15:19:35 crc kubenswrapper[4869]: I1001 15:19:35.898067 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2dc1d5e4-3aad-4bf9-b27b-e2d35547583f-ovsdbserver-nb\") pod \"dnsmasq-dns-7565f89d5c-ldvdn\" (UID: \"2dc1d5e4-3aad-4bf9-b27b-e2d35547583f\") " pod="openstack/dnsmasq-dns-7565f89d5c-ldvdn" Oct 01 15:19:35 crc kubenswrapper[4869]: I1001 15:19:35.929438 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/92372d1e-b67d-4615-9978-946caec18e59-kube-api-access-fpcgj" 
(OuterVolumeSpecName: "kube-api-access-fpcgj") pod "92372d1e-b67d-4615-9978-946caec18e59" (UID: "92372d1e-b67d-4615-9978-946caec18e59"). InnerVolumeSpecName "kube-api-access-fpcgj". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:19:35 crc kubenswrapper[4869]: I1001 15:19:35.945887 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/92372d1e-b67d-4615-9978-946caec18e59-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "92372d1e-b67d-4615-9978-946caec18e59" (UID: "92372d1e-b67d-4615-9978-946caec18e59"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:19:36 crc kubenswrapper[4869]: I1001 15:19:36.002060 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2dc1d5e4-3aad-4bf9-b27b-e2d35547583f-ovsdbserver-sb\") pod \"dnsmasq-dns-7565f89d5c-ldvdn\" (UID: \"2dc1d5e4-3aad-4bf9-b27b-e2d35547583f\") " pod="openstack/dnsmasq-dns-7565f89d5c-ldvdn" Oct 01 15:19:36 crc kubenswrapper[4869]: I1001 15:19:36.002129 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2dc1d5e4-3aad-4bf9-b27b-e2d35547583f-dns-svc\") pod \"dnsmasq-dns-7565f89d5c-ldvdn\" (UID: \"2dc1d5e4-3aad-4bf9-b27b-e2d35547583f\") " pod="openstack/dnsmasq-dns-7565f89d5c-ldvdn" Oct 01 15:19:36 crc kubenswrapper[4869]: I1001 15:19:36.002177 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2dc1d5e4-3aad-4bf9-b27b-e2d35547583f-ovsdbserver-nb\") pod \"dnsmasq-dns-7565f89d5c-ldvdn\" (UID: \"2dc1d5e4-3aad-4bf9-b27b-e2d35547583f\") " pod="openstack/dnsmasq-dns-7565f89d5c-ldvdn" Oct 01 15:19:36 crc kubenswrapper[4869]: I1001 15:19:36.002239 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s79rb\" (UniqueName: \"kubernetes.io/projected/2dc1d5e4-3aad-4bf9-b27b-e2d35547583f-kube-api-access-s79rb\") pod \"dnsmasq-dns-7565f89d5c-ldvdn\" (UID: \"2dc1d5e4-3aad-4bf9-b27b-e2d35547583f\") " pod="openstack/dnsmasq-dns-7565f89d5c-ldvdn" Oct 01 15:19:36 crc kubenswrapper[4869]: I1001 15:19:36.002462 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2dc1d5e4-3aad-4bf9-b27b-e2d35547583f-config\") pod \"dnsmasq-dns-7565f89d5c-ldvdn\" (UID: \"2dc1d5e4-3aad-4bf9-b27b-e2d35547583f\") " pod="openstack/dnsmasq-dns-7565f89d5c-ldvdn" Oct 01 15:19:36 crc kubenswrapper[4869]: I1001 15:19:36.002522 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/92372d1e-b67d-4615-9978-946caec18e59-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 15:19:36 crc kubenswrapper[4869]: I1001 15:19:36.002532 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fpcgj\" (UniqueName: \"kubernetes.io/projected/92372d1e-b67d-4615-9978-946caec18e59-kube-api-access-fpcgj\") on node \"crc\" DevicePath \"\"" Oct 01 15:19:36 crc kubenswrapper[4869]: I1001 15:19:36.003308 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2dc1d5e4-3aad-4bf9-b27b-e2d35547583f-config\") pod \"dnsmasq-dns-7565f89d5c-ldvdn\" (UID: \"2dc1d5e4-3aad-4bf9-b27b-e2d35547583f\") " pod="openstack/dnsmasq-dns-7565f89d5c-ldvdn" Oct 01 15:19:36 crc 
kubenswrapper[4869]: I1001 15:19:36.003390 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/92372d1e-b67d-4615-9978-946caec18e59-config-data" (OuterVolumeSpecName: "config-data") pod "92372d1e-b67d-4615-9978-946caec18e59" (UID: "92372d1e-b67d-4615-9978-946caec18e59"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:19:36 crc kubenswrapper[4869]: I1001 15:19:36.003987 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2dc1d5e4-3aad-4bf9-b27b-e2d35547583f-ovsdbserver-nb\") pod \"dnsmasq-dns-7565f89d5c-ldvdn\" (UID: \"2dc1d5e4-3aad-4bf9-b27b-e2d35547583f\") " pod="openstack/dnsmasq-dns-7565f89d5c-ldvdn" Oct 01 15:19:36 crc kubenswrapper[4869]: I1001 15:19:36.004086 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2dc1d5e4-3aad-4bf9-b27b-e2d35547583f-dns-svc\") pod \"dnsmasq-dns-7565f89d5c-ldvdn\" (UID: \"2dc1d5e4-3aad-4bf9-b27b-e2d35547583f\") " pod="openstack/dnsmasq-dns-7565f89d5c-ldvdn" Oct 01 15:19:36 crc kubenswrapper[4869]: I1001 15:19:36.004702 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2dc1d5e4-3aad-4bf9-b27b-e2d35547583f-ovsdbserver-sb\") pod \"dnsmasq-dns-7565f89d5c-ldvdn\" (UID: \"2dc1d5e4-3aad-4bf9-b27b-e2d35547583f\") " pod="openstack/dnsmasq-dns-7565f89d5c-ldvdn" Oct 01 15:19:36 crc kubenswrapper[4869]: I1001 15:19:36.028160 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s79rb\" (UniqueName: \"kubernetes.io/projected/2dc1d5e4-3aad-4bf9-b27b-e2d35547583f-kube-api-access-s79rb\") pod \"dnsmasq-dns-7565f89d5c-ldvdn\" (UID: \"2dc1d5e4-3aad-4bf9-b27b-e2d35547583f\") " pod="openstack/dnsmasq-dns-7565f89d5c-ldvdn" Oct 01 15:19:36 crc kubenswrapper[4869]: I1001 15:19:36.104193 4869 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/92372d1e-b67d-4615-9978-946caec18e59-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 15:19:36 crc kubenswrapper[4869]: I1001 15:19:36.128977 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7565f89d5c-ldvdn" Oct 01 15:19:36 crc kubenswrapper[4869]: I1001 15:19:36.386480 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-j277p" event={"ID":"92372d1e-b67d-4615-9978-946caec18e59","Type":"ContainerDied","Data":"614bfe137d01ec075f1a6ccbc70ac5f054227cbb472ec6c2972eb17998cfc5d7"} Oct 01 15:19:36 crc kubenswrapper[4869]: I1001 15:19:36.386783 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="614bfe137d01ec075f1a6ccbc70ac5f054227cbb472ec6c2972eb17998cfc5d7" Oct 01 15:19:36 crc kubenswrapper[4869]: I1001 15:19:36.386559 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-j277p" Oct 01 15:19:36 crc kubenswrapper[4869]: I1001 15:19:36.529763 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7565f89d5c-ldvdn"] Oct 01 15:19:36 crc kubenswrapper[4869]: I1001 15:19:36.642313 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7565f89d5c-ldvdn"] Oct 01 15:19:36 crc kubenswrapper[4869]: I1001 15:19:36.679938 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-68db7cd549-vl7f5"] Oct 01 15:19:36 crc kubenswrapper[4869]: I1001 15:19:36.682777 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-68db7cd549-vl7f5" Oct 01 15:19:36 crc kubenswrapper[4869]: I1001 15:19:36.697034 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-fb9lk"] Oct 01 15:19:36 crc kubenswrapper[4869]: I1001 15:19:36.704024 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-68db7cd549-vl7f5"] Oct 01 15:19:36 crc kubenswrapper[4869]: I1001 15:19:36.704104 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-fb9lk" Oct 01 15:19:36 crc kubenswrapper[4869]: I1001 15:19:36.719388 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-fb9lk"] Oct 01 15:19:36 crc kubenswrapper[4869]: I1001 15:19:36.722601 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Oct 01 15:19:36 crc kubenswrapper[4869]: I1001 15:19:36.722979 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-vhhr5" Oct 01 15:19:36 crc kubenswrapper[4869]: I1001 15:19:36.723240 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Oct 01 15:19:36 crc kubenswrapper[4869]: I1001 15:19:36.723499 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Oct 01 15:19:36 crc kubenswrapper[4869]: I1001 15:19:36.828167 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/e6e277cf-6fbb-46e6-96c2-004009b93faa-credential-keys\") pod \"keystone-bootstrap-fb9lk\" (UID: \"e6e277cf-6fbb-46e6-96c2-004009b93faa\") " pod="openstack/keystone-bootstrap-fb9lk" Oct 01 15:19:36 crc kubenswrapper[4869]: I1001 15:19:36.828224 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e6e277cf-6fbb-46e6-96c2-004009b93faa-config-data\") pod \"keystone-bootstrap-fb9lk\" (UID: \"e6e277cf-6fbb-46e6-96c2-004009b93faa\") " pod="openstack/keystone-bootstrap-fb9lk" Oct 01 15:19:36 crc kubenswrapper[4869]: I1001 15:19:36.828244 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0113a14b-fb25-42f3-b8e7-ef15a3041a31-dns-svc\") pod \"dnsmasq-dns-68db7cd549-vl7f5\" (UID: \"0113a14b-fb25-42f3-b8e7-ef15a3041a31\") " pod="openstack/dnsmasq-dns-68db7cd549-vl7f5" Oct 01 15:19:36 crc kubenswrapper[4869]: I1001 15:19:36.828279 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e6e277cf-6fbb-46e6-96c2-004009b93faa-scripts\") pod \"keystone-bootstrap-fb9lk\" (UID: 
\"e6e277cf-6fbb-46e6-96c2-004009b93faa\") " pod="openstack/keystone-bootstrap-fb9lk" Oct 01 15:19:36 crc kubenswrapper[4869]: I1001 15:19:36.828296 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-slwmz\" (UniqueName: \"kubernetes.io/projected/0113a14b-fb25-42f3-b8e7-ef15a3041a31-kube-api-access-slwmz\") pod \"dnsmasq-dns-68db7cd549-vl7f5\" (UID: \"0113a14b-fb25-42f3-b8e7-ef15a3041a31\") " pod="openstack/dnsmasq-dns-68db7cd549-vl7f5" Oct 01 15:19:36 crc kubenswrapper[4869]: I1001 15:19:36.828321 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0113a14b-fb25-42f3-b8e7-ef15a3041a31-ovsdbserver-nb\") pod \"dnsmasq-dns-68db7cd549-vl7f5\" (UID: \"0113a14b-fb25-42f3-b8e7-ef15a3041a31\") " pod="openstack/dnsmasq-dns-68db7cd549-vl7f5" Oct 01 15:19:36 crc kubenswrapper[4869]: I1001 15:19:36.828368 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0113a14b-fb25-42f3-b8e7-ef15a3041a31-ovsdbserver-sb\") pod \"dnsmasq-dns-68db7cd549-vl7f5\" (UID: \"0113a14b-fb25-42f3-b8e7-ef15a3041a31\") " pod="openstack/dnsmasq-dns-68db7cd549-vl7f5" Oct 01 15:19:36 crc kubenswrapper[4869]: I1001 15:19:36.828391 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0113a14b-fb25-42f3-b8e7-ef15a3041a31-config\") pod \"dnsmasq-dns-68db7cd549-vl7f5\" (UID: \"0113a14b-fb25-42f3-b8e7-ef15a3041a31\") " pod="openstack/dnsmasq-dns-68db7cd549-vl7f5" Oct 01 15:19:36 crc kubenswrapper[4869]: I1001 15:19:36.828453 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e6e277cf-6fbb-46e6-96c2-004009b93faa-combined-ca-bundle\") pod \"keystone-bootstrap-fb9lk\" (UID: \"e6e277cf-6fbb-46e6-96c2-004009b93faa\") " pod="openstack/keystone-bootstrap-fb9lk" Oct 01 15:19:36 crc kubenswrapper[4869]: I1001 15:19:36.828474 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/e6e277cf-6fbb-46e6-96c2-004009b93faa-fernet-keys\") pod \"keystone-bootstrap-fb9lk\" (UID: \"e6e277cf-6fbb-46e6-96c2-004009b93faa\") " pod="openstack/keystone-bootstrap-fb9lk" Oct 01 15:19:36 crc kubenswrapper[4869]: I1001 15:19:36.828507 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l4hn8\" (UniqueName: \"kubernetes.io/projected/e6e277cf-6fbb-46e6-96c2-004009b93faa-kube-api-access-l4hn8\") pod \"keystone-bootstrap-fb9lk\" (UID: \"e6e277cf-6fbb-46e6-96c2-004009b93faa\") " pod="openstack/keystone-bootstrap-fb9lk" Oct 01 15:19:36 crc kubenswrapper[4869]: I1001 15:19:36.839160 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-695b54fb65-rp2zz"] Oct 01 15:19:36 crc kubenswrapper[4869]: I1001 15:19:36.843584 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-695b54fb65-rp2zz" Oct 01 15:19:36 crc kubenswrapper[4869]: I1001 15:19:36.849168 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-scripts" Oct 01 15:19:36 crc kubenswrapper[4869]: I1001 15:19:36.849744 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-config-data" Oct 01 15:19:36 crc kubenswrapper[4869]: I1001 15:19:36.849870 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon-horizon-dockercfg-n7khd" Oct 01 15:19:36 crc kubenswrapper[4869]: I1001 15:19:36.849983 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon" Oct 01 15:19:36 crc kubenswrapper[4869]: I1001 15:19:36.865732 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-695b54fb65-rp2zz"] Oct 01 15:19:36 crc kubenswrapper[4869]: I1001 15:19:36.913427 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-slhz2"] Oct 01 15:19:36 crc kubenswrapper[4869]: I1001 15:19:36.914663 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-slhz2" Oct 01 15:19:36 crc kubenswrapper[4869]: I1001 15:19:36.918102 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-slhz2"] Oct 01 15:19:36 crc kubenswrapper[4869]: I1001 15:19:36.924469 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-4mwvx" Oct 01 15:19:36 crc kubenswrapper[4869]: I1001 15:19:36.924834 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Oct 01 15:19:36 crc kubenswrapper[4869]: I1001 15:19:36.924976 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Oct 01 15:19:36 crc kubenswrapper[4869]: I1001 15:19:36.930072 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/e6e277cf-6fbb-46e6-96c2-004009b93faa-fernet-keys\") pod \"keystone-bootstrap-fb9lk\" (UID: \"e6e277cf-6fbb-46e6-96c2-004009b93faa\") " pod="openstack/keystone-bootstrap-fb9lk" Oct 01 15:19:36 crc kubenswrapper[4869]: I1001 15:19:36.930131 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/abaaae88-543a-4735-a3e2-08978a450647-config-data\") pod \"horizon-695b54fb65-rp2zz\" (UID: \"abaaae88-543a-4735-a3e2-08978a450647\") " pod="openstack/horizon-695b54fb65-rp2zz" Oct 01 15:19:36 crc kubenswrapper[4869]: I1001 15:19:36.930165 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l4hn8\" (UniqueName: \"kubernetes.io/projected/e6e277cf-6fbb-46e6-96c2-004009b93faa-kube-api-access-l4hn8\") pod \"keystone-bootstrap-fb9lk\" (UID: \"e6e277cf-6fbb-46e6-96c2-004009b93faa\") " pod="openstack/keystone-bootstrap-fb9lk" Oct 01 15:19:36 crc kubenswrapper[4869]: I1001 15:19:36.930189 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/e6e277cf-6fbb-46e6-96c2-004009b93faa-credential-keys\") pod \"keystone-bootstrap-fb9lk\" (UID: \"e6e277cf-6fbb-46e6-96c2-004009b93faa\") " pod="openstack/keystone-bootstrap-fb9lk" Oct 01 15:19:36 crc kubenswrapper[4869]: I1001 15:19:36.930211 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/abaaae88-543a-4735-a3e2-08978a450647-horizon-secret-key\") pod \"horizon-695b54fb65-rp2zz\" (UID: \"abaaae88-543a-4735-a3e2-08978a450647\") " pod="openstack/horizon-695b54fb65-rp2zz" Oct 01 15:19:36 crc kubenswrapper[4869]: I1001 15:19:36.930234 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e6e277cf-6fbb-46e6-96c2-004009b93faa-config-data\") pod \"keystone-bootstrap-fb9lk\" (UID: \"e6e277cf-6fbb-46e6-96c2-004009b93faa\") " pod="openstack/keystone-bootstrap-fb9lk" Oct 01 15:19:36 crc kubenswrapper[4869]: I1001 15:19:36.930268 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0113a14b-fb25-42f3-b8e7-ef15a3041a31-dns-svc\") pod \"dnsmasq-dns-68db7cd549-vl7f5\" (UID: \"0113a14b-fb25-42f3-b8e7-ef15a3041a31\") " pod="openstack/dnsmasq-dns-68db7cd549-vl7f5" Oct 01 15:19:36 crc kubenswrapper[4869]: I1001 15:19:36.930287 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e6e277cf-6fbb-46e6-96c2-004009b93faa-scripts\") pod \"keystone-bootstrap-fb9lk\" (UID: \"e6e277cf-6fbb-46e6-96c2-004009b93faa\") " pod="openstack/keystone-bootstrap-fb9lk" Oct 01 15:19:36 crc kubenswrapper[4869]: I1001 15:19:36.930305 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-slwmz\" (UniqueName: \"kubernetes.io/projected/0113a14b-fb25-42f3-b8e7-ef15a3041a31-kube-api-access-slwmz\") pod \"dnsmasq-dns-68db7cd549-vl7f5\" (UID: \"0113a14b-fb25-42f3-b8e7-ef15a3041a31\") " pod="openstack/dnsmasq-dns-68db7cd549-vl7f5" Oct 01 15:19:36 crc kubenswrapper[4869]: I1001 15:19:36.930328 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0113a14b-fb25-42f3-b8e7-ef15a3041a31-ovsdbserver-nb\") pod \"dnsmasq-dns-68db7cd549-vl7f5\" (UID: \"0113a14b-fb25-42f3-b8e7-ef15a3041a31\") " pod="openstack/dnsmasq-dns-68db7cd549-vl7f5" Oct 01 15:19:36 crc kubenswrapper[4869]: I1001 15:19:36.930354 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/abaaae88-543a-4735-a3e2-08978a450647-scripts\") pod \"horizon-695b54fb65-rp2zz\" (UID: \"abaaae88-543a-4735-a3e2-08978a450647\") " pod="openstack/horizon-695b54fb65-rp2zz" Oct 01 15:19:36 crc kubenswrapper[4869]: I1001 15:19:36.930386 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0113a14b-fb25-42f3-b8e7-ef15a3041a31-ovsdbserver-sb\") pod \"dnsmasq-dns-68db7cd549-vl7f5\" (UID: \"0113a14b-fb25-42f3-b8e7-ef15a3041a31\") " pod="openstack/dnsmasq-dns-68db7cd549-vl7f5" Oct 01 15:19:36 crc kubenswrapper[4869]: I1001 15:19:36.930404 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0113a14b-fb25-42f3-b8e7-ef15a3041a31-config\") pod \"dnsmasq-dns-68db7cd549-vl7f5\" (UID: \"0113a14b-fb25-42f3-b8e7-ef15a3041a31\") " pod="openstack/dnsmasq-dns-68db7cd549-vl7f5" Oct 01 15:19:36 crc kubenswrapper[4869]: I1001 15:19:36.930422 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/abaaae88-543a-4735-a3e2-08978a450647-logs\") pod \"horizon-695b54fb65-rp2zz\" (UID: \"abaaae88-543a-4735-a3e2-08978a450647\") " pod="openstack/horizon-695b54fb65-rp2zz" Oct 01 15:19:36 crc kubenswrapper[4869]: I1001 15:19:36.930444 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dgr8s\" (UniqueName: \"kubernetes.io/projected/abaaae88-543a-4735-a3e2-08978a450647-kube-api-access-dgr8s\") pod \"horizon-695b54fb65-rp2zz\" (UID: \"abaaae88-543a-4735-a3e2-08978a450647\") " pod="openstack/horizon-695b54fb65-rp2zz" Oct 01 15:19:36 crc kubenswrapper[4869]: I1001 15:19:36.930493 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e6e277cf-6fbb-46e6-96c2-004009b93faa-combined-ca-bundle\") pod \"keystone-bootstrap-fb9lk\" (UID: \"e6e277cf-6fbb-46e6-96c2-004009b93faa\") " pod="openstack/keystone-bootstrap-fb9lk" Oct 01 15:19:36 crc kubenswrapper[4869]: I1001 15:19:36.933058 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0113a14b-fb25-42f3-b8e7-ef15a3041a31-ovsdbserver-sb\") pod \"dnsmasq-dns-68db7cd549-vl7f5\" (UID: \"0113a14b-fb25-42f3-b8e7-ef15a3041a31\") " pod="openstack/dnsmasq-dns-68db7cd549-vl7f5" Oct 01 15:19:36 crc kubenswrapper[4869]: I1001 15:19:36.936448 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0113a14b-fb25-42f3-b8e7-ef15a3041a31-ovsdbserver-nb\") pod \"dnsmasq-dns-68db7cd549-vl7f5\" (UID: \"0113a14b-fb25-42f3-b8e7-ef15a3041a31\") " pod="openstack/dnsmasq-dns-68db7cd549-vl7f5" Oct 01 15:19:36 crc kubenswrapper[4869]: I1001 15:19:36.937675 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0113a14b-fb25-42f3-b8e7-ef15a3041a31-config\") pod \"dnsmasq-dns-68db7cd549-vl7f5\" (UID: \"0113a14b-fb25-42f3-b8e7-ef15a3041a31\") " pod="openstack/dnsmasq-dns-68db7cd549-vl7f5" Oct 01 15:19:36 crc kubenswrapper[4869]: I1001 15:19:36.938318 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0113a14b-fb25-42f3-b8e7-ef15a3041a31-dns-svc\") pod \"dnsmasq-dns-68db7cd549-vl7f5\" (UID: \"0113a14b-fb25-42f3-b8e7-ef15a3041a31\") " pod="openstack/dnsmasq-dns-68db7cd549-vl7f5" Oct 01 15:19:36 crc kubenswrapper[4869]: I1001 15:19:36.963524 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/e6e277cf-6fbb-46e6-96c2-004009b93faa-fernet-keys\") pod \"keystone-bootstrap-fb9lk\" (UID: \"e6e277cf-6fbb-46e6-96c2-004009b93faa\") " pod="openstack/keystone-bootstrap-fb9lk" Oct 01 15:19:36 crc kubenswrapper[4869]: I1001 15:19:36.964101 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e6e277cf-6fbb-46e6-96c2-004009b93faa-combined-ca-bundle\") pod \"keystone-bootstrap-fb9lk\" (UID: \"e6e277cf-6fbb-46e6-96c2-004009b93faa\") " pod="openstack/keystone-bootstrap-fb9lk" Oct 01 15:19:36 crc kubenswrapper[4869]: I1001 15:19:36.966640 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e6e277cf-6fbb-46e6-96c2-004009b93faa-config-data\") pod \"keystone-bootstrap-fb9lk\" (UID: \"e6e277cf-6fbb-46e6-96c2-004009b93faa\") " 
pod="openstack/keystone-bootstrap-fb9lk" Oct 01 15:19:36 crc kubenswrapper[4869]: I1001 15:19:36.970960 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l4hn8\" (UniqueName: \"kubernetes.io/projected/e6e277cf-6fbb-46e6-96c2-004009b93faa-kube-api-access-l4hn8\") pod \"keystone-bootstrap-fb9lk\" (UID: \"e6e277cf-6fbb-46e6-96c2-004009b93faa\") " pod="openstack/keystone-bootstrap-fb9lk" Oct 01 15:19:36 crc kubenswrapper[4869]: I1001 15:19:36.971393 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e6e277cf-6fbb-46e6-96c2-004009b93faa-scripts\") pod \"keystone-bootstrap-fb9lk\" (UID: \"e6e277cf-6fbb-46e6-96c2-004009b93faa\") " pod="openstack/keystone-bootstrap-fb9lk" Oct 01 15:19:36 crc kubenswrapper[4869]: I1001 15:19:36.971790 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/e6e277cf-6fbb-46e6-96c2-004009b93faa-credential-keys\") pod \"keystone-bootstrap-fb9lk\" (UID: \"e6e277cf-6fbb-46e6-96c2-004009b93faa\") " pod="openstack/keystone-bootstrap-fb9lk" Oct 01 15:19:36 crc kubenswrapper[4869]: I1001 15:19:36.979725 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-slwmz\" (UniqueName: \"kubernetes.io/projected/0113a14b-fb25-42f3-b8e7-ef15a3041a31-kube-api-access-slwmz\") pod \"dnsmasq-dns-68db7cd549-vl7f5\" (UID: \"0113a14b-fb25-42f3-b8e7-ef15a3041a31\") " pod="openstack/dnsmasq-dns-68db7cd549-vl7f5" Oct 01 15:19:36 crc kubenswrapper[4869]: I1001 15:19:36.990759 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-56977fbfb5-99lb5"] Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.001574 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-56977fbfb5-99lb5" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.034106 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/abaaae88-543a-4735-a3e2-08978a450647-logs\") pod \"horizon-695b54fb65-rp2zz\" (UID: \"abaaae88-543a-4735-a3e2-08978a450647\") " pod="openstack/horizon-695b54fb65-rp2zz" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.034165 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dgr8s\" (UniqueName: \"kubernetes.io/projected/abaaae88-543a-4735-a3e2-08978a450647-kube-api-access-dgr8s\") pod \"horizon-695b54fb65-rp2zz\" (UID: \"abaaae88-543a-4735-a3e2-08978a450647\") " pod="openstack/horizon-695b54fb65-rp2zz" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.034208 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/f9c5d763-90ad-4611-8cac-193343af1b78-db-sync-config-data\") pod \"cinder-db-sync-slhz2\" (UID: \"f9c5d763-90ad-4611-8cac-193343af1b78\") " pod="openstack/cinder-db-sync-slhz2" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.034304 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/abaaae88-543a-4735-a3e2-08978a450647-config-data\") pod \"horizon-695b54fb65-rp2zz\" (UID: \"abaaae88-543a-4735-a3e2-08978a450647\") " pod="openstack/horizon-695b54fb65-rp2zz" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.034341 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2r52b\" (UniqueName: \"kubernetes.io/projected/f9c5d763-90ad-4611-8cac-193343af1b78-kube-api-access-2r52b\") pod \"cinder-db-sync-slhz2\" (UID: \"f9c5d763-90ad-4611-8cac-193343af1b78\") " pod="openstack/cinder-db-sync-slhz2" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.034371 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f9c5d763-90ad-4611-8cac-193343af1b78-config-data\") pod \"cinder-db-sync-slhz2\" (UID: \"f9c5d763-90ad-4611-8cac-193343af1b78\") " pod="openstack/cinder-db-sync-slhz2" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.034402 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/abaaae88-543a-4735-a3e2-08978a450647-horizon-secret-key\") pod \"horizon-695b54fb65-rp2zz\" (UID: \"abaaae88-543a-4735-a3e2-08978a450647\") " pod="openstack/horizon-695b54fb65-rp2zz" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.034435 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f9c5d763-90ad-4611-8cac-193343af1b78-scripts\") pod \"cinder-db-sync-slhz2\" (UID: \"f9c5d763-90ad-4611-8cac-193343af1b78\") " pod="openstack/cinder-db-sync-slhz2" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.034475 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9c5d763-90ad-4611-8cac-193343af1b78-combined-ca-bundle\") pod \"cinder-db-sync-slhz2\" (UID: \"f9c5d763-90ad-4611-8cac-193343af1b78\") " pod="openstack/cinder-db-sync-slhz2" Oct 01 
15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.034512 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f9c5d763-90ad-4611-8cac-193343af1b78-etc-machine-id\") pod \"cinder-db-sync-slhz2\" (UID: \"f9c5d763-90ad-4611-8cac-193343af1b78\") " pod="openstack/cinder-db-sync-slhz2" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.034536 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/abaaae88-543a-4735-a3e2-08978a450647-scripts\") pod \"horizon-695b54fb65-rp2zz\" (UID: \"abaaae88-543a-4735-a3e2-08978a450647\") " pod="openstack/horizon-695b54fb65-rp2zz" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.035357 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/abaaae88-543a-4735-a3e2-08978a450647-scripts\") pod \"horizon-695b54fb65-rp2zz\" (UID: \"abaaae88-543a-4735-a3e2-08978a450647\") " pod="openstack/horizon-695b54fb65-rp2zz" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.036370 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-56977fbfb5-99lb5"] Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.037870 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/abaaae88-543a-4735-a3e2-08978a450647-config-data\") pod \"horizon-695b54fb65-rp2zz\" (UID: \"abaaae88-543a-4735-a3e2-08978a450647\") " pod="openstack/horizon-695b54fb65-rp2zz" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.038112 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/abaaae88-543a-4735-a3e2-08978a450647-logs\") pod \"horizon-695b54fb65-rp2zz\" (UID: \"abaaae88-543a-4735-a3e2-08978a450647\") " pod="openstack/horizon-695b54fb65-rp2zz" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.065698 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/abaaae88-543a-4735-a3e2-08978a450647-horizon-secret-key\") pod \"horizon-695b54fb65-rp2zz\" (UID: \"abaaae88-543a-4735-a3e2-08978a450647\") " pod="openstack/horizon-695b54fb65-rp2zz" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.083955 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-68db7cd549-vl7f5" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.099653 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-fb9lk" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.107025 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.133697 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dgr8s\" (UniqueName: \"kubernetes.io/projected/abaaae88-543a-4735-a3e2-08978a450647-kube-api-access-dgr8s\") pod \"horizon-695b54fb65-rp2zz\" (UID: \"abaaae88-543a-4735-a3e2-08978a450647\") " pod="openstack/horizon-695b54fb65-rp2zz" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.140944 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/f9c5d763-90ad-4611-8cac-193343af1b78-db-sync-config-data\") pod \"cinder-db-sync-slhz2\" (UID: \"f9c5d763-90ad-4611-8cac-193343af1b78\") " pod="openstack/cinder-db-sync-slhz2" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.140988 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/84004c40-05d7-4037-b3c3-748ad141fcf7-scripts\") pod \"horizon-56977fbfb5-99lb5\" (UID: \"84004c40-05d7-4037-b3c3-748ad141fcf7\") " pod="openstack/horizon-56977fbfb5-99lb5" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.141032 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zzw47\" (UniqueName: \"kubernetes.io/projected/84004c40-05d7-4037-b3c3-748ad141fcf7-kube-api-access-zzw47\") pod \"horizon-56977fbfb5-99lb5\" (UID: \"84004c40-05d7-4037-b3c3-748ad141fcf7\") " pod="openstack/horizon-56977fbfb5-99lb5" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.141077 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2r52b\" (UniqueName: \"kubernetes.io/projected/f9c5d763-90ad-4611-8cac-193343af1b78-kube-api-access-2r52b\") pod \"cinder-db-sync-slhz2\" (UID: \"f9c5d763-90ad-4611-8cac-193343af1b78\") " pod="openstack/cinder-db-sync-slhz2" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.141101 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f9c5d763-90ad-4611-8cac-193343af1b78-config-data\") pod \"cinder-db-sync-slhz2\" (UID: \"f9c5d763-90ad-4611-8cac-193343af1b78\") " pod="openstack/cinder-db-sync-slhz2" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.141134 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f9c5d763-90ad-4611-8cac-193343af1b78-scripts\") pod \"cinder-db-sync-slhz2\" (UID: \"f9c5d763-90ad-4611-8cac-193343af1b78\") " pod="openstack/cinder-db-sync-slhz2" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.141153 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/84004c40-05d7-4037-b3c3-748ad141fcf7-horizon-secret-key\") pod \"horizon-56977fbfb5-99lb5\" (UID: \"84004c40-05d7-4037-b3c3-748ad141fcf7\") " pod="openstack/horizon-56977fbfb5-99lb5" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.141171 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/configmap/84004c40-05d7-4037-b3c3-748ad141fcf7-config-data\") pod \"horizon-56977fbfb5-99lb5\" (UID: \"84004c40-05d7-4037-b3c3-748ad141fcf7\") " pod="openstack/horizon-56977fbfb5-99lb5" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.141185 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/84004c40-05d7-4037-b3c3-748ad141fcf7-logs\") pod \"horizon-56977fbfb5-99lb5\" (UID: \"84004c40-05d7-4037-b3c3-748ad141fcf7\") " pod="openstack/horizon-56977fbfb5-99lb5" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.141213 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9c5d763-90ad-4611-8cac-193343af1b78-combined-ca-bundle\") pod \"cinder-db-sync-slhz2\" (UID: \"f9c5d763-90ad-4611-8cac-193343af1b78\") " pod="openstack/cinder-db-sync-slhz2" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.141234 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f9c5d763-90ad-4611-8cac-193343af1b78-etc-machine-id\") pod \"cinder-db-sync-slhz2\" (UID: \"f9c5d763-90ad-4611-8cac-193343af1b78\") " pod="openstack/cinder-db-sync-slhz2" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.141338 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f9c5d763-90ad-4611-8cac-193343af1b78-etc-machine-id\") pod \"cinder-db-sync-slhz2\" (UID: \"f9c5d763-90ad-4611-8cac-193343af1b78\") " pod="openstack/cinder-db-sync-slhz2" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.144712 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/f9c5d763-90ad-4611-8cac-193343af1b78-db-sync-config-data\") pod \"cinder-db-sync-slhz2\" (UID: \"f9c5d763-90ad-4611-8cac-193343af1b78\") " pod="openstack/cinder-db-sync-slhz2" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.146472 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f9c5d763-90ad-4611-8cac-193343af1b78-scripts\") pod \"cinder-db-sync-slhz2\" (UID: \"f9c5d763-90ad-4611-8cac-193343af1b78\") " pod="openstack/cinder-db-sync-slhz2" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.147583 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-68db7cd549-vl7f5"] Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.147609 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.147688 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.149791 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.149976 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.152246 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9c5d763-90ad-4611-8cac-193343af1b78-combined-ca-bundle\") pod \"cinder-db-sync-slhz2\" (UID: \"f9c5d763-90ad-4611-8cac-193343af1b78\") " pod="openstack/cinder-db-sync-slhz2" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.167141 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-25k96"] Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.168434 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f9c5d763-90ad-4611-8cac-193343af1b78-config-data\") pod \"cinder-db-sync-slhz2\" (UID: \"f9c5d763-90ad-4611-8cac-193343af1b78\") " pod="openstack/cinder-db-sync-slhz2" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.169445 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-25k96" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.180727 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-qj7j6" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.180924 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.181076 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.191163 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2r52b\" (UniqueName: \"kubernetes.io/projected/f9c5d763-90ad-4611-8cac-193343af1b78-kube-api-access-2r52b\") pod \"cinder-db-sync-slhz2\" (UID: \"f9c5d763-90ad-4611-8cac-193343af1b78\") " pod="openstack/cinder-db-sync-slhz2" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.194765 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-695b54fb65-rp2zz" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.206284 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-25k96"] Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.244095 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/84004c40-05d7-4037-b3c3-748ad141fcf7-horizon-secret-key\") pod \"horizon-56977fbfb5-99lb5\" (UID: \"84004c40-05d7-4037-b3c3-748ad141fcf7\") " pod="openstack/horizon-56977fbfb5-99lb5" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.244139 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631-config-data\") pod \"ceilometer-0\" (UID: \"cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631\") " pod="openstack/ceilometer-0" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.244156 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631\") " pod="openstack/ceilometer-0" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.244175 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/84004c40-05d7-4037-b3c3-748ad141fcf7-logs\") pod \"horizon-56977fbfb5-99lb5\" (UID: \"84004c40-05d7-4037-b3c3-748ad141fcf7\") " pod="openstack/horizon-56977fbfb5-99lb5" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.244193 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/84004c40-05d7-4037-b3c3-748ad141fcf7-config-data\") pod \"horizon-56977fbfb5-99lb5\" (UID: \"84004c40-05d7-4037-b3c3-748ad141fcf7\") " pod="openstack/horizon-56977fbfb5-99lb5" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.244213 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631-run-httpd\") pod \"ceilometer-0\" (UID: \"cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631\") " pod="openstack/ceilometer-0" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.244230 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dm4cr\" (UniqueName: \"kubernetes.io/projected/cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631-kube-api-access-dm4cr\") pod \"ceilometer-0\" (UID: \"cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631\") " pod="openstack/ceilometer-0" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.244352 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631-scripts\") pod \"ceilometer-0\" (UID: \"cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631\") " pod="openstack/ceilometer-0" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.244398 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/546e76f6-f453-481b-8115-369d6ff9326c-scripts\") pod \"placement-db-sync-25k96\" (UID: \"546e76f6-f453-481b-8115-369d6ff9326c\") " 
pod="openstack/placement-db-sync-25k96" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.244414 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/546e76f6-f453-481b-8115-369d6ff9326c-config-data\") pod \"placement-db-sync-25k96\" (UID: \"546e76f6-f453-481b-8115-369d6ff9326c\") " pod="openstack/placement-db-sync-25k96" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.244436 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9rx94\" (UniqueName: \"kubernetes.io/projected/546e76f6-f453-481b-8115-369d6ff9326c-kube-api-access-9rx94\") pod \"placement-db-sync-25k96\" (UID: \"546e76f6-f453-481b-8115-369d6ff9326c\") " pod="openstack/placement-db-sync-25k96" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.244474 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/84004c40-05d7-4037-b3c3-748ad141fcf7-scripts\") pod \"horizon-56977fbfb5-99lb5\" (UID: \"84004c40-05d7-4037-b3c3-748ad141fcf7\") " pod="openstack/horizon-56977fbfb5-99lb5" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.244500 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/546e76f6-f453-481b-8115-369d6ff9326c-logs\") pod \"placement-db-sync-25k96\" (UID: \"546e76f6-f453-481b-8115-369d6ff9326c\") " pod="openstack/placement-db-sync-25k96" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.244528 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zzw47\" (UniqueName: \"kubernetes.io/projected/84004c40-05d7-4037-b3c3-748ad141fcf7-kube-api-access-zzw47\") pod \"horizon-56977fbfb5-99lb5\" (UID: \"84004c40-05d7-4037-b3c3-748ad141fcf7\") " pod="openstack/horizon-56977fbfb5-99lb5" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.244549 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/546e76f6-f453-481b-8115-369d6ff9326c-combined-ca-bundle\") pod \"placement-db-sync-25k96\" (UID: \"546e76f6-f453-481b-8115-369d6ff9326c\") " pod="openstack/placement-db-sync-25k96" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.244585 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631-log-httpd\") pod \"ceilometer-0\" (UID: \"cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631\") " pod="openstack/ceilometer-0" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.244605 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631\") " pod="openstack/ceilometer-0" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.245292 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-d6669bb45-npxjb"] Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.246476 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-d6669bb45-npxjb" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.247477 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/84004c40-05d7-4037-b3c3-748ad141fcf7-logs\") pod \"horizon-56977fbfb5-99lb5\" (UID: \"84004c40-05d7-4037-b3c3-748ad141fcf7\") " pod="openstack/horizon-56977fbfb5-99lb5" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.247767 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/84004c40-05d7-4037-b3c3-748ad141fcf7-scripts\") pod \"horizon-56977fbfb5-99lb5\" (UID: \"84004c40-05d7-4037-b3c3-748ad141fcf7\") " pod="openstack/horizon-56977fbfb5-99lb5" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.248391 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/84004c40-05d7-4037-b3c3-748ad141fcf7-config-data\") pod \"horizon-56977fbfb5-99lb5\" (UID: \"84004c40-05d7-4037-b3c3-748ad141fcf7\") " pod="openstack/horizon-56977fbfb5-99lb5" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.252799 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/84004c40-05d7-4037-b3c3-748ad141fcf7-horizon-secret-key\") pod \"horizon-56977fbfb5-99lb5\" (UID: \"84004c40-05d7-4037-b3c3-748ad141fcf7\") " pod="openstack/horizon-56977fbfb5-99lb5" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.267579 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zzw47\" (UniqueName: \"kubernetes.io/projected/84004c40-05d7-4037-b3c3-748ad141fcf7-kube-api-access-zzw47\") pod \"horizon-56977fbfb5-99lb5\" (UID: \"84004c40-05d7-4037-b3c3-748ad141fcf7\") " pod="openstack/horizon-56977fbfb5-99lb5" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.268733 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-d6669bb45-npxjb"] Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.346233 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/546e76f6-f453-481b-8115-369d6ff9326c-logs\") pod \"placement-db-sync-25k96\" (UID: \"546e76f6-f453-481b-8115-369d6ff9326c\") " pod="openstack/placement-db-sync-25k96" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.346312 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/546e76f6-f453-481b-8115-369d6ff9326c-combined-ca-bundle\") pod \"placement-db-sync-25k96\" (UID: \"546e76f6-f453-481b-8115-369d6ff9326c\") " pod="openstack/placement-db-sync-25k96" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.346333 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7b58b756-3a68-4820-a610-e9d23f2cc4bb-ovsdbserver-nb\") pod \"dnsmasq-dns-d6669bb45-npxjb\" (UID: \"7b58b756-3a68-4820-a610-e9d23f2cc4bb\") " pod="openstack/dnsmasq-dns-d6669bb45-npxjb" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.346350 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7b58b756-3a68-4820-a610-e9d23f2cc4bb-config\") pod \"dnsmasq-dns-d6669bb45-npxjb\" (UID: 
\"7b58b756-3a68-4820-a610-e9d23f2cc4bb\") " pod="openstack/dnsmasq-dns-d6669bb45-npxjb" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.346393 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631-log-httpd\") pod \"ceilometer-0\" (UID: \"cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631\") " pod="openstack/ceilometer-0" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.346410 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631\") " pod="openstack/ceilometer-0" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.346449 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jqssh\" (UniqueName: \"kubernetes.io/projected/7b58b756-3a68-4820-a610-e9d23f2cc4bb-kube-api-access-jqssh\") pod \"dnsmasq-dns-d6669bb45-npxjb\" (UID: \"7b58b756-3a68-4820-a610-e9d23f2cc4bb\") " pod="openstack/dnsmasq-dns-d6669bb45-npxjb" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.346485 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631-config-data\") pod \"ceilometer-0\" (UID: \"cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631\") " pod="openstack/ceilometer-0" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.346501 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631\") " pod="openstack/ceilometer-0" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.346531 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631-run-httpd\") pod \"ceilometer-0\" (UID: \"cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631\") " pod="openstack/ceilometer-0" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.346547 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dm4cr\" (UniqueName: \"kubernetes.io/projected/cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631-kube-api-access-dm4cr\") pod \"ceilometer-0\" (UID: \"cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631\") " pod="openstack/ceilometer-0" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.346581 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631-scripts\") pod \"ceilometer-0\" (UID: \"cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631\") " pod="openstack/ceilometer-0" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.346608 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/546e76f6-f453-481b-8115-369d6ff9326c-scripts\") pod \"placement-db-sync-25k96\" (UID: \"546e76f6-f453-481b-8115-369d6ff9326c\") " pod="openstack/placement-db-sync-25k96" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.346625 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: 
\"kubernetes.io/configmap/7b58b756-3a68-4820-a610-e9d23f2cc4bb-dns-svc\") pod \"dnsmasq-dns-d6669bb45-npxjb\" (UID: \"7b58b756-3a68-4820-a610-e9d23f2cc4bb\") " pod="openstack/dnsmasq-dns-d6669bb45-npxjb" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.346643 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/546e76f6-f453-481b-8115-369d6ff9326c-config-data\") pod \"placement-db-sync-25k96\" (UID: \"546e76f6-f453-481b-8115-369d6ff9326c\") " pod="openstack/placement-db-sync-25k96" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.346660 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9rx94\" (UniqueName: \"kubernetes.io/projected/546e76f6-f453-481b-8115-369d6ff9326c-kube-api-access-9rx94\") pod \"placement-db-sync-25k96\" (UID: \"546e76f6-f453-481b-8115-369d6ff9326c\") " pod="openstack/placement-db-sync-25k96" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.346718 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7b58b756-3a68-4820-a610-e9d23f2cc4bb-ovsdbserver-sb\") pod \"dnsmasq-dns-d6669bb45-npxjb\" (UID: \"7b58b756-3a68-4820-a610-e9d23f2cc4bb\") " pod="openstack/dnsmasq-dns-d6669bb45-npxjb" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.346716 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/546e76f6-f453-481b-8115-369d6ff9326c-logs\") pod \"placement-db-sync-25k96\" (UID: \"546e76f6-f453-481b-8115-369d6ff9326c\") " pod="openstack/placement-db-sync-25k96" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.349170 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631-run-httpd\") pod \"ceilometer-0\" (UID: \"cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631\") " pod="openstack/ceilometer-0" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.350157 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631\") " pod="openstack/ceilometer-0" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.351161 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631-log-httpd\") pod \"ceilometer-0\" (UID: \"cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631\") " pod="openstack/ceilometer-0" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.351780 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/546e76f6-f453-481b-8115-369d6ff9326c-combined-ca-bundle\") pod \"placement-db-sync-25k96\" (UID: \"546e76f6-f453-481b-8115-369d6ff9326c\") " pod="openstack/placement-db-sync-25k96" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.352316 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/546e76f6-f453-481b-8115-369d6ff9326c-config-data\") pod \"placement-db-sync-25k96\" (UID: \"546e76f6-f453-481b-8115-369d6ff9326c\") " pod="openstack/placement-db-sync-25k96" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.353941 
4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631-scripts\") pod \"ceilometer-0\" (UID: \"cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631\") " pod="openstack/ceilometer-0" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.354678 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/546e76f6-f453-481b-8115-369d6ff9326c-scripts\") pod \"placement-db-sync-25k96\" (UID: \"546e76f6-f453-481b-8115-369d6ff9326c\") " pod="openstack/placement-db-sync-25k96" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.356798 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631-config-data\") pod \"ceilometer-0\" (UID: \"cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631\") " pod="openstack/ceilometer-0" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.363756 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631\") " pod="openstack/ceilometer-0" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.369016 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dm4cr\" (UniqueName: \"kubernetes.io/projected/cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631-kube-api-access-dm4cr\") pod \"ceilometer-0\" (UID: \"cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631\") " pod="openstack/ceilometer-0" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.370227 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9rx94\" (UniqueName: \"kubernetes.io/projected/546e76f6-f453-481b-8115-369d6ff9326c-kube-api-access-9rx94\") pod \"placement-db-sync-25k96\" (UID: \"546e76f6-f453-481b-8115-369d6ff9326c\") " pod="openstack/placement-db-sync-25k96" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.397378 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7565f89d5c-ldvdn" event={"ID":"2dc1d5e4-3aad-4bf9-b27b-e2d35547583f","Type":"ContainerStarted","Data":"15d5f0da8fcdf04b41bfffd1885bb06d97fe2114793027243d0941fbedb2ff62"} Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.448691 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7b58b756-3a68-4820-a610-e9d23f2cc4bb-ovsdbserver-sb\") pod \"dnsmasq-dns-d6669bb45-npxjb\" (UID: \"7b58b756-3a68-4820-a610-e9d23f2cc4bb\") " pod="openstack/dnsmasq-dns-d6669bb45-npxjb" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.448757 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7b58b756-3a68-4820-a610-e9d23f2cc4bb-ovsdbserver-nb\") pod \"dnsmasq-dns-d6669bb45-npxjb\" (UID: \"7b58b756-3a68-4820-a610-e9d23f2cc4bb\") " pod="openstack/dnsmasq-dns-d6669bb45-npxjb" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.448775 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7b58b756-3a68-4820-a610-e9d23f2cc4bb-config\") pod \"dnsmasq-dns-d6669bb45-npxjb\" (UID: \"7b58b756-3a68-4820-a610-e9d23f2cc4bb\") " pod="openstack/dnsmasq-dns-d6669bb45-npxjb" Oct 01 15:19:37 crc 
kubenswrapper[4869]: I1001 15:19:37.448831 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jqssh\" (UniqueName: \"kubernetes.io/projected/7b58b756-3a68-4820-a610-e9d23f2cc4bb-kube-api-access-jqssh\") pod \"dnsmasq-dns-d6669bb45-npxjb\" (UID: \"7b58b756-3a68-4820-a610-e9d23f2cc4bb\") " pod="openstack/dnsmasq-dns-d6669bb45-npxjb" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.449637 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7b58b756-3a68-4820-a610-e9d23f2cc4bb-ovsdbserver-sb\") pod \"dnsmasq-dns-d6669bb45-npxjb\" (UID: \"7b58b756-3a68-4820-a610-e9d23f2cc4bb\") " pod="openstack/dnsmasq-dns-d6669bb45-npxjb" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.449800 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7b58b756-3a68-4820-a610-e9d23f2cc4bb-ovsdbserver-nb\") pod \"dnsmasq-dns-d6669bb45-npxjb\" (UID: \"7b58b756-3a68-4820-a610-e9d23f2cc4bb\") " pod="openstack/dnsmasq-dns-d6669bb45-npxjb" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.450190 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7b58b756-3a68-4820-a610-e9d23f2cc4bb-config\") pod \"dnsmasq-dns-d6669bb45-npxjb\" (UID: \"7b58b756-3a68-4820-a610-e9d23f2cc4bb\") " pod="openstack/dnsmasq-dns-d6669bb45-npxjb" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.451034 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7b58b756-3a68-4820-a610-e9d23f2cc4bb-dns-svc\") pod \"dnsmasq-dns-d6669bb45-npxjb\" (UID: \"7b58b756-3a68-4820-a610-e9d23f2cc4bb\") " pod="openstack/dnsmasq-dns-d6669bb45-npxjb" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.451706 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7b58b756-3a68-4820-a610-e9d23f2cc4bb-dns-svc\") pod \"dnsmasq-dns-d6669bb45-npxjb\" (UID: \"7b58b756-3a68-4820-a610-e9d23f2cc4bb\") " pod="openstack/dnsmasq-dns-d6669bb45-npxjb" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.473669 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jqssh\" (UniqueName: \"kubernetes.io/projected/7b58b756-3a68-4820-a610-e9d23f2cc4bb-kube-api-access-jqssh\") pod \"dnsmasq-dns-d6669bb45-npxjb\" (UID: \"7b58b756-3a68-4820-a610-e9d23f2cc4bb\") " pod="openstack/dnsmasq-dns-d6669bb45-npxjb" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.482444 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-slhz2" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.506089 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-56977fbfb5-99lb5" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.509426 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.536786 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-25k96" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.580641 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-d6669bb45-npxjb" Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.658511 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-68db7cd549-vl7f5"] Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.715174 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-695b54fb65-rp2zz"] Oct 01 15:19:37 crc kubenswrapper[4869]: I1001 15:19:37.760027 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-fb9lk"] Oct 01 15:19:38 crc kubenswrapper[4869]: I1001 15:19:38.082027 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-slhz2"] Oct 01 15:19:38 crc kubenswrapper[4869]: W1001 15:19:38.086851 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf9c5d763_90ad_4611_8cac_193343af1b78.slice/crio-87f0430d9a77a41624a7e692546232b10783a736f8a1277ca16159fc4795f254 WatchSource:0}: Error finding container 87f0430d9a77a41624a7e692546232b10783a736f8a1277ca16159fc4795f254: Status 404 returned error can't find the container with id 87f0430d9a77a41624a7e692546232b10783a736f8a1277ca16159fc4795f254 Oct 01 15:19:38 crc kubenswrapper[4869]: I1001 15:19:38.102782 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-56977fbfb5-99lb5"] Oct 01 15:19:38 crc kubenswrapper[4869]: W1001 15:19:38.106796 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod84004c40_05d7_4037_b3c3_748ad141fcf7.slice/crio-8406b2566c3b001aff4f6df899aedc6ce4327835d618d4200f75c50d6cc56166 WatchSource:0}: Error finding container 8406b2566c3b001aff4f6df899aedc6ce4327835d618d4200f75c50d6cc56166: Status 404 returned error can't find the container with id 8406b2566c3b001aff4f6df899aedc6ce4327835d618d4200f75c50d6cc56166 Oct 01 15:19:38 crc kubenswrapper[4869]: I1001 15:19:38.198380 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 01 15:19:38 crc kubenswrapper[4869]: I1001 15:19:38.406761 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-695b54fb65-rp2zz" event={"ID":"abaaae88-543a-4735-a3e2-08978a450647","Type":"ContainerStarted","Data":"eb3783afbda1d725ad8856081e5f2a6d95536f52c4c5d12d96f5146b7f8f78c4"} Oct 01 15:19:38 crc kubenswrapper[4869]: I1001 15:19:38.408134 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-56977fbfb5-99lb5" event={"ID":"84004c40-05d7-4037-b3c3-748ad141fcf7","Type":"ContainerStarted","Data":"8406b2566c3b001aff4f6df899aedc6ce4327835d618d4200f75c50d6cc56166"} Oct 01 15:19:38 crc kubenswrapper[4869]: I1001 15:19:38.409807 4869 generic.go:334] "Generic (PLEG): container finished" podID="2dc1d5e4-3aad-4bf9-b27b-e2d35547583f" containerID="765816b650891c3a97949970fad1a43018cb7cd54bacda3d7e5081d6c92bc37c" exitCode=0 Oct 01 15:19:38 crc kubenswrapper[4869]: I1001 15:19:38.409873 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7565f89d5c-ldvdn" event={"ID":"2dc1d5e4-3aad-4bf9-b27b-e2d35547583f","Type":"ContainerDied","Data":"765816b650891c3a97949970fad1a43018cb7cd54bacda3d7e5081d6c92bc37c"} Oct 01 15:19:38 crc kubenswrapper[4869]: I1001 15:19:38.411545 4869 generic.go:334] "Generic (PLEG): container finished" podID="0113a14b-fb25-42f3-b8e7-ef15a3041a31" containerID="e1fefc118331d862bc6d511a0ff75af2b31453d2e6a48b409562ec2d9f6e9338" exitCode=0 Oct 01 
15:19:38 crc kubenswrapper[4869]: I1001 15:19:38.411619 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-68db7cd549-vl7f5" event={"ID":"0113a14b-fb25-42f3-b8e7-ef15a3041a31","Type":"ContainerDied","Data":"e1fefc118331d862bc6d511a0ff75af2b31453d2e6a48b409562ec2d9f6e9338"} Oct 01 15:19:38 crc kubenswrapper[4869]: I1001 15:19:38.411645 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-68db7cd549-vl7f5" event={"ID":"0113a14b-fb25-42f3-b8e7-ef15a3041a31","Type":"ContainerStarted","Data":"5561937f3f06944bf10d76dbd9f692fa1a9f1ff1d0d24e2635c16896226e9824"} Oct 01 15:19:38 crc kubenswrapper[4869]: I1001 15:19:38.414799 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631","Type":"ContainerStarted","Data":"ca1d0cde3b352d9ca482a458374343992e8e99fc078b7c27009c3f2d7ede5292"} Oct 01 15:19:38 crc kubenswrapper[4869]: I1001 15:19:38.416117 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-fb9lk" event={"ID":"e6e277cf-6fbb-46e6-96c2-004009b93faa","Type":"ContainerStarted","Data":"1f949349f800051a952b2fe8bbf88dacd5fa2e71449eab4d35a9a964ac60e4c6"} Oct 01 15:19:38 crc kubenswrapper[4869]: I1001 15:19:38.416144 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-fb9lk" event={"ID":"e6e277cf-6fbb-46e6-96c2-004009b93faa","Type":"ContainerStarted","Data":"e842449260587100a1eeded7a1f24f60571f09ed20946e70e30c7b23099d619e"} Oct 01 15:19:38 crc kubenswrapper[4869]: I1001 15:19:38.416917 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-slhz2" event={"ID":"f9c5d763-90ad-4611-8cac-193343af1b78","Type":"ContainerStarted","Data":"87f0430d9a77a41624a7e692546232b10783a736f8a1277ca16159fc4795f254"} Oct 01 15:19:38 crc kubenswrapper[4869]: I1001 15:19:38.497303 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-fb9lk" podStartSLOduration=2.497242425 podStartE2EDuration="2.497242425s" podCreationTimestamp="2025-10-01 15:19:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:19:38.471841503 +0000 UTC m=+887.618684639" watchObservedRunningTime="2025-10-01 15:19:38.497242425 +0000 UTC m=+887.644085561" Oct 01 15:19:38 crc kubenswrapper[4869]: I1001 15:19:38.953443 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-6b4b-account-create-8zk2q"] Oct 01 15:19:38 crc kubenswrapper[4869]: I1001 15:19:38.954832 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-6b4b-account-create-8zk2q" Oct 01 15:19:38 crc kubenswrapper[4869]: I1001 15:19:38.957020 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Oct 01 15:19:38 crc kubenswrapper[4869]: I1001 15:19:38.971799 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-25k96"] Oct 01 15:19:38 crc kubenswrapper[4869]: I1001 15:19:38.976237 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-6b4b-account-create-8zk2q"] Oct 01 15:19:38 crc kubenswrapper[4869]: I1001 15:19:38.982729 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-d6669bb45-npxjb"] Oct 01 15:19:38 crc kubenswrapper[4869]: W1001 15:19:38.985818 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7b58b756_3a68_4820_a610_e9d23f2cc4bb.slice/crio-407ae8ccc0336c490520cf6284719406a77cfd102545843daa9f5053793352e6 WatchSource:0}: Error finding container 407ae8ccc0336c490520cf6284719406a77cfd102545843daa9f5053793352e6: Status 404 returned error can't find the container with id 407ae8ccc0336c490520cf6284719406a77cfd102545843daa9f5053793352e6 Oct 01 15:19:39 crc kubenswrapper[4869]: I1001 15:19:39.081679 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q999b\" (UniqueName: \"kubernetes.io/projected/40fc21e1-9cc1-42cb-84ae-a5edde36cefd-kube-api-access-q999b\") pod \"barbican-6b4b-account-create-8zk2q\" (UID: \"40fc21e1-9cc1-42cb-84ae-a5edde36cefd\") " pod="openstack/barbican-6b4b-account-create-8zk2q" Oct 01 15:19:39 crc kubenswrapper[4869]: I1001 15:19:39.102813 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-68db7cd549-vl7f5" Oct 01 15:19:39 crc kubenswrapper[4869]: I1001 15:19:39.137861 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7565f89d5c-ldvdn" Oct 01 15:19:39 crc kubenswrapper[4869]: I1001 15:19:39.146236 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-3b43-account-create-fgnrl"] Oct 01 15:19:39 crc kubenswrapper[4869]: E1001 15:19:39.146624 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2dc1d5e4-3aad-4bf9-b27b-e2d35547583f" containerName="init" Oct 01 15:19:39 crc kubenswrapper[4869]: I1001 15:19:39.146635 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="2dc1d5e4-3aad-4bf9-b27b-e2d35547583f" containerName="init" Oct 01 15:19:39 crc kubenswrapper[4869]: E1001 15:19:39.146662 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0113a14b-fb25-42f3-b8e7-ef15a3041a31" containerName="init" Oct 01 15:19:39 crc kubenswrapper[4869]: I1001 15:19:39.146669 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="0113a14b-fb25-42f3-b8e7-ef15a3041a31" containerName="init" Oct 01 15:19:39 crc kubenswrapper[4869]: I1001 15:19:39.146827 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="0113a14b-fb25-42f3-b8e7-ef15a3041a31" containerName="init" Oct 01 15:19:39 crc kubenswrapper[4869]: I1001 15:19:39.146844 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="2dc1d5e4-3aad-4bf9-b27b-e2d35547583f" containerName="init" Oct 01 15:19:39 crc kubenswrapper[4869]: I1001 15:19:39.147416 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-3b43-account-create-fgnrl" Oct 01 15:19:39 crc kubenswrapper[4869]: I1001 15:19:39.149664 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Oct 01 15:19:39 crc kubenswrapper[4869]: I1001 15:19:39.160444 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-3b43-account-create-fgnrl"] Oct 01 15:19:39 crc kubenswrapper[4869]: I1001 15:19:39.182887 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0113a14b-fb25-42f3-b8e7-ef15a3041a31-dns-svc\") pod \"0113a14b-fb25-42f3-b8e7-ef15a3041a31\" (UID: \"0113a14b-fb25-42f3-b8e7-ef15a3041a31\") " Oct 01 15:19:39 crc kubenswrapper[4869]: I1001 15:19:39.183037 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-slwmz\" (UniqueName: \"kubernetes.io/projected/0113a14b-fb25-42f3-b8e7-ef15a3041a31-kube-api-access-slwmz\") pod \"0113a14b-fb25-42f3-b8e7-ef15a3041a31\" (UID: \"0113a14b-fb25-42f3-b8e7-ef15a3041a31\") " Oct 01 15:19:39 crc kubenswrapper[4869]: I1001 15:19:39.183121 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0113a14b-fb25-42f3-b8e7-ef15a3041a31-config\") pod \"0113a14b-fb25-42f3-b8e7-ef15a3041a31\" (UID: \"0113a14b-fb25-42f3-b8e7-ef15a3041a31\") " Oct 01 15:19:39 crc kubenswrapper[4869]: I1001 15:19:39.183180 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0113a14b-fb25-42f3-b8e7-ef15a3041a31-ovsdbserver-nb\") pod \"0113a14b-fb25-42f3-b8e7-ef15a3041a31\" (UID: \"0113a14b-fb25-42f3-b8e7-ef15a3041a31\") " Oct 01 15:19:39 crc kubenswrapper[4869]: I1001 15:19:39.183230 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0113a14b-fb25-42f3-b8e7-ef15a3041a31-ovsdbserver-sb\") pod \"0113a14b-fb25-42f3-b8e7-ef15a3041a31\" (UID: \"0113a14b-fb25-42f3-b8e7-ef15a3041a31\") " Oct 01 15:19:39 crc kubenswrapper[4869]: I1001 15:19:39.183524 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q999b\" (UniqueName: \"kubernetes.io/projected/40fc21e1-9cc1-42cb-84ae-a5edde36cefd-kube-api-access-q999b\") pod \"barbican-6b4b-account-create-8zk2q\" (UID: \"40fc21e1-9cc1-42cb-84ae-a5edde36cefd\") " pod="openstack/barbican-6b4b-account-create-8zk2q" Oct 01 15:19:39 crc kubenswrapper[4869]: I1001 15:19:39.190272 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0113a14b-fb25-42f3-b8e7-ef15a3041a31-kube-api-access-slwmz" (OuterVolumeSpecName: "kube-api-access-slwmz") pod "0113a14b-fb25-42f3-b8e7-ef15a3041a31" (UID: "0113a14b-fb25-42f3-b8e7-ef15a3041a31"). InnerVolumeSpecName "kube-api-access-slwmz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:19:39 crc kubenswrapper[4869]: I1001 15:19:39.210695 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q999b\" (UniqueName: \"kubernetes.io/projected/40fc21e1-9cc1-42cb-84ae-a5edde36cefd-kube-api-access-q999b\") pod \"barbican-6b4b-account-create-8zk2q\" (UID: \"40fc21e1-9cc1-42cb-84ae-a5edde36cefd\") " pod="openstack/barbican-6b4b-account-create-8zk2q" Oct 01 15:19:39 crc kubenswrapper[4869]: I1001 15:19:39.256463 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0113a14b-fb25-42f3-b8e7-ef15a3041a31-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "0113a14b-fb25-42f3-b8e7-ef15a3041a31" (UID: "0113a14b-fb25-42f3-b8e7-ef15a3041a31"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:19:39 crc kubenswrapper[4869]: I1001 15:19:39.264313 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0113a14b-fb25-42f3-b8e7-ef15a3041a31-config" (OuterVolumeSpecName: "config") pod "0113a14b-fb25-42f3-b8e7-ef15a3041a31" (UID: "0113a14b-fb25-42f3-b8e7-ef15a3041a31"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:19:39 crc kubenswrapper[4869]: I1001 15:19:39.270597 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0113a14b-fb25-42f3-b8e7-ef15a3041a31-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "0113a14b-fb25-42f3-b8e7-ef15a3041a31" (UID: "0113a14b-fb25-42f3-b8e7-ef15a3041a31"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:19:39 crc kubenswrapper[4869]: I1001 15:19:39.273977 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0113a14b-fb25-42f3-b8e7-ef15a3041a31-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "0113a14b-fb25-42f3-b8e7-ef15a3041a31" (UID: "0113a14b-fb25-42f3-b8e7-ef15a3041a31"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:19:39 crc kubenswrapper[4869]: I1001 15:19:39.285046 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2dc1d5e4-3aad-4bf9-b27b-e2d35547583f-ovsdbserver-nb\") pod \"2dc1d5e4-3aad-4bf9-b27b-e2d35547583f\" (UID: \"2dc1d5e4-3aad-4bf9-b27b-e2d35547583f\") " Oct 01 15:19:39 crc kubenswrapper[4869]: I1001 15:19:39.285134 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s79rb\" (UniqueName: \"kubernetes.io/projected/2dc1d5e4-3aad-4bf9-b27b-e2d35547583f-kube-api-access-s79rb\") pod \"2dc1d5e4-3aad-4bf9-b27b-e2d35547583f\" (UID: \"2dc1d5e4-3aad-4bf9-b27b-e2d35547583f\") " Oct 01 15:19:39 crc kubenswrapper[4869]: I1001 15:19:39.286170 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2dc1d5e4-3aad-4bf9-b27b-e2d35547583f-dns-svc\") pod \"2dc1d5e4-3aad-4bf9-b27b-e2d35547583f\" (UID: \"2dc1d5e4-3aad-4bf9-b27b-e2d35547583f\") " Oct 01 15:19:39 crc kubenswrapper[4869]: I1001 15:19:39.286245 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2dc1d5e4-3aad-4bf9-b27b-e2d35547583f-config\") pod \"2dc1d5e4-3aad-4bf9-b27b-e2d35547583f\" (UID: \"2dc1d5e4-3aad-4bf9-b27b-e2d35547583f\") " Oct 01 15:19:39 crc kubenswrapper[4869]: I1001 15:19:39.286465 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2dc1d5e4-3aad-4bf9-b27b-e2d35547583f-ovsdbserver-sb\") pod \"2dc1d5e4-3aad-4bf9-b27b-e2d35547583f\" (UID: \"2dc1d5e4-3aad-4bf9-b27b-e2d35547583f\") " Oct 01 15:19:39 crc kubenswrapper[4869]: I1001 15:19:39.287532 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k4nzc\" (UniqueName: \"kubernetes.io/projected/b685c639-4b3f-4597-8b0d-9f03283f18ed-kube-api-access-k4nzc\") pod \"neutron-3b43-account-create-fgnrl\" (UID: \"b685c639-4b3f-4597-8b0d-9f03283f18ed\") " pod="openstack/neutron-3b43-account-create-fgnrl" Oct 01 15:19:39 crc kubenswrapper[4869]: I1001 15:19:39.287623 4869 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0113a14b-fb25-42f3-b8e7-ef15a3041a31-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 01 15:19:39 crc kubenswrapper[4869]: I1001 15:19:39.287680 4869 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0113a14b-fb25-42f3-b8e7-ef15a3041a31-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 01 15:19:39 crc kubenswrapper[4869]: I1001 15:19:39.287704 4869 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0113a14b-fb25-42f3-b8e7-ef15a3041a31-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 01 15:19:39 crc kubenswrapper[4869]: I1001 15:19:39.287750 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-slwmz\" (UniqueName: \"kubernetes.io/projected/0113a14b-fb25-42f3-b8e7-ef15a3041a31-kube-api-access-slwmz\") on node \"crc\" DevicePath \"\"" Oct 01 15:19:39 crc kubenswrapper[4869]: I1001 15:19:39.289088 4869 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0113a14b-fb25-42f3-b8e7-ef15a3041a31-config\") on node \"crc\" 
DevicePath \"\"" Oct 01 15:19:39 crc kubenswrapper[4869]: I1001 15:19:39.291649 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2dc1d5e4-3aad-4bf9-b27b-e2d35547583f-kube-api-access-s79rb" (OuterVolumeSpecName: "kube-api-access-s79rb") pod "2dc1d5e4-3aad-4bf9-b27b-e2d35547583f" (UID: "2dc1d5e4-3aad-4bf9-b27b-e2d35547583f"). InnerVolumeSpecName "kube-api-access-s79rb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:19:39 crc kubenswrapper[4869]: I1001 15:19:39.305989 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2dc1d5e4-3aad-4bf9-b27b-e2d35547583f-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "2dc1d5e4-3aad-4bf9-b27b-e2d35547583f" (UID: "2dc1d5e4-3aad-4bf9-b27b-e2d35547583f"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:19:39 crc kubenswrapper[4869]: I1001 15:19:39.310699 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2dc1d5e4-3aad-4bf9-b27b-e2d35547583f-config" (OuterVolumeSpecName: "config") pod "2dc1d5e4-3aad-4bf9-b27b-e2d35547583f" (UID: "2dc1d5e4-3aad-4bf9-b27b-e2d35547583f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:19:39 crc kubenswrapper[4869]: I1001 15:19:39.322647 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2dc1d5e4-3aad-4bf9-b27b-e2d35547583f-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "2dc1d5e4-3aad-4bf9-b27b-e2d35547583f" (UID: "2dc1d5e4-3aad-4bf9-b27b-e2d35547583f"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:19:39 crc kubenswrapper[4869]: I1001 15:19:39.326037 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2dc1d5e4-3aad-4bf9-b27b-e2d35547583f-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "2dc1d5e4-3aad-4bf9-b27b-e2d35547583f" (UID: "2dc1d5e4-3aad-4bf9-b27b-e2d35547583f"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:19:39 crc kubenswrapper[4869]: I1001 15:19:39.393944 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k4nzc\" (UniqueName: \"kubernetes.io/projected/b685c639-4b3f-4597-8b0d-9f03283f18ed-kube-api-access-k4nzc\") pod \"neutron-3b43-account-create-fgnrl\" (UID: \"b685c639-4b3f-4597-8b0d-9f03283f18ed\") " pod="openstack/neutron-3b43-account-create-fgnrl" Oct 01 15:19:39 crc kubenswrapper[4869]: I1001 15:19:39.394723 4869 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2dc1d5e4-3aad-4bf9-b27b-e2d35547583f-config\") on node \"crc\" DevicePath \"\"" Oct 01 15:19:39 crc kubenswrapper[4869]: I1001 15:19:39.394747 4869 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2dc1d5e4-3aad-4bf9-b27b-e2d35547583f-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 01 15:19:39 crc kubenswrapper[4869]: I1001 15:19:39.394760 4869 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2dc1d5e4-3aad-4bf9-b27b-e2d35547583f-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 01 15:19:39 crc kubenswrapper[4869]: I1001 15:19:39.394772 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s79rb\" (UniqueName: \"kubernetes.io/projected/2dc1d5e4-3aad-4bf9-b27b-e2d35547583f-kube-api-access-s79rb\") on node \"crc\" DevicePath \"\"" Oct 01 15:19:39 crc kubenswrapper[4869]: I1001 15:19:39.394785 4869 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2dc1d5e4-3aad-4bf9-b27b-e2d35547583f-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 01 15:19:39 crc kubenswrapper[4869]: I1001 15:19:39.402209 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-6b4b-account-create-8zk2q" Oct 01 15:19:39 crc kubenswrapper[4869]: I1001 15:19:39.416618 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k4nzc\" (UniqueName: \"kubernetes.io/projected/b685c639-4b3f-4597-8b0d-9f03283f18ed-kube-api-access-k4nzc\") pod \"neutron-3b43-account-create-fgnrl\" (UID: \"b685c639-4b3f-4597-8b0d-9f03283f18ed\") " pod="openstack/neutron-3b43-account-create-fgnrl" Oct 01 15:19:39 crc kubenswrapper[4869]: I1001 15:19:39.431577 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-68db7cd549-vl7f5" Oct 01 15:19:39 crc kubenswrapper[4869]: I1001 15:19:39.431707 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-68db7cd549-vl7f5" event={"ID":"0113a14b-fb25-42f3-b8e7-ef15a3041a31","Type":"ContainerDied","Data":"5561937f3f06944bf10d76dbd9f692fa1a9f1ff1d0d24e2635c16896226e9824"} Oct 01 15:19:39 crc kubenswrapper[4869]: I1001 15:19:39.431768 4869 scope.go:117] "RemoveContainer" containerID="e1fefc118331d862bc6d511a0ff75af2b31453d2e6a48b409562ec2d9f6e9338" Oct 01 15:19:39 crc kubenswrapper[4869]: I1001 15:19:39.434280 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-25k96" event={"ID":"546e76f6-f453-481b-8115-369d6ff9326c","Type":"ContainerStarted","Data":"6f72994f966da5a03597a2acba01ed115457273eb1801981333b602664360083"} Oct 01 15:19:39 crc kubenswrapper[4869]: I1001 15:19:39.438149 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7565f89d5c-ldvdn" event={"ID":"2dc1d5e4-3aad-4bf9-b27b-e2d35547583f","Type":"ContainerDied","Data":"15d5f0da8fcdf04b41bfffd1885bb06d97fe2114793027243d0941fbedb2ff62"} Oct 01 15:19:39 crc kubenswrapper[4869]: I1001 15:19:39.438175 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7565f89d5c-ldvdn" Oct 01 15:19:39 crc kubenswrapper[4869]: I1001 15:19:39.441550 4869 generic.go:334] "Generic (PLEG): container finished" podID="7b58b756-3a68-4820-a610-e9d23f2cc4bb" containerID="76edf854ebd4d43f579051422e21f9dcac3447506c81fc9dcd283ff343cf78a1" exitCode=0 Oct 01 15:19:39 crc kubenswrapper[4869]: I1001 15:19:39.441624 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-d6669bb45-npxjb" event={"ID":"7b58b756-3a68-4820-a610-e9d23f2cc4bb","Type":"ContainerDied","Data":"76edf854ebd4d43f579051422e21f9dcac3447506c81fc9dcd283ff343cf78a1"} Oct 01 15:19:39 crc kubenswrapper[4869]: I1001 15:19:39.441671 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-d6669bb45-npxjb" event={"ID":"7b58b756-3a68-4820-a610-e9d23f2cc4bb","Type":"ContainerStarted","Data":"407ae8ccc0336c490520cf6284719406a77cfd102545843daa9f5053793352e6"} Oct 01 15:19:39 crc kubenswrapper[4869]: I1001 15:19:39.471775 4869 scope.go:117] "RemoveContainer" containerID="765816b650891c3a97949970fad1a43018cb7cd54bacda3d7e5081d6c92bc37c" Oct 01 15:19:39 crc kubenswrapper[4869]: I1001 15:19:39.482658 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-3b43-account-create-fgnrl" Oct 01 15:19:39 crc kubenswrapper[4869]: I1001 15:19:39.514864 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-68db7cd549-vl7f5"] Oct 01 15:19:39 crc kubenswrapper[4869]: I1001 15:19:39.521081 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-68db7cd549-vl7f5"] Oct 01 15:19:39 crc kubenswrapper[4869]: I1001 15:19:39.543877 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7565f89d5c-ldvdn"] Oct 01 15:19:39 crc kubenswrapper[4869]: I1001 15:19:39.549935 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7565f89d5c-ldvdn"] Oct 01 15:19:39 crc kubenswrapper[4869]: I1001 15:19:39.635585 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0113a14b-fb25-42f3-b8e7-ef15a3041a31" path="/var/lib/kubelet/pods/0113a14b-fb25-42f3-b8e7-ef15a3041a31/volumes" Oct 01 15:19:39 crc kubenswrapper[4869]: I1001 15:19:39.637206 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2dc1d5e4-3aad-4bf9-b27b-e2d35547583f" path="/var/lib/kubelet/pods/2dc1d5e4-3aad-4bf9-b27b-e2d35547583f/volumes" Oct 01 15:19:39 crc kubenswrapper[4869]: I1001 15:19:39.997340 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-6b4b-account-create-8zk2q"] Oct 01 15:19:40 crc kubenswrapper[4869]: I1001 15:19:40.104151 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-3b43-account-create-fgnrl"] Oct 01 15:19:40 crc kubenswrapper[4869]: I1001 15:19:40.456929 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-3b43-account-create-fgnrl" event={"ID":"b685c639-4b3f-4597-8b0d-9f03283f18ed","Type":"ContainerStarted","Data":"f016e01d825d31393c24de728542ad4d00ece51da7b3df220722829a51122719"} Oct 01 15:19:40 crc kubenswrapper[4869]: I1001 15:19:40.456984 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-3b43-account-create-fgnrl" event={"ID":"b685c639-4b3f-4597-8b0d-9f03283f18ed","Type":"ContainerStarted","Data":"1183957ca0c521e2928a6e04f618bf68813ad753f15766c04df6a6006d5fc112"} Oct 01 15:19:40 crc kubenswrapper[4869]: I1001 15:19:40.461312 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-d6669bb45-npxjb" event={"ID":"7b58b756-3a68-4820-a610-e9d23f2cc4bb","Type":"ContainerStarted","Data":"4233b1734d1bb72ea33d30f635bd91befb5597fa01a2bd88f116bd250f8d1831"} Oct 01 15:19:40 crc kubenswrapper[4869]: I1001 15:19:40.461393 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-d6669bb45-npxjb" Oct 01 15:19:40 crc kubenswrapper[4869]: I1001 15:19:40.467458 4869 generic.go:334] "Generic (PLEG): container finished" podID="40fc21e1-9cc1-42cb-84ae-a5edde36cefd" containerID="3eeff271b18b415270d44d04b22de017726b98da37306fe19d1395319ff66461" exitCode=0 Oct 01 15:19:40 crc kubenswrapper[4869]: I1001 15:19:40.467570 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-6b4b-account-create-8zk2q" event={"ID":"40fc21e1-9cc1-42cb-84ae-a5edde36cefd","Type":"ContainerDied","Data":"3eeff271b18b415270d44d04b22de017726b98da37306fe19d1395319ff66461"} Oct 01 15:19:40 crc kubenswrapper[4869]: I1001 15:19:40.467594 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-6b4b-account-create-8zk2q" 
event={"ID":"40fc21e1-9cc1-42cb-84ae-a5edde36cefd","Type":"ContainerStarted","Data":"9e9686e1f6fbc80090960ca859a789a0cf99a7c9a15830afed4616856ab5a4fa"} Oct 01 15:19:40 crc kubenswrapper[4869]: I1001 15:19:40.518419 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-d6669bb45-npxjb" podStartSLOduration=3.518396483 podStartE2EDuration="3.518396483s" podCreationTimestamp="2025-10-01 15:19:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:19:40.495588977 +0000 UTC m=+889.642432103" watchObservedRunningTime="2025-10-01 15:19:40.518396483 +0000 UTC m=+889.665239609" Oct 01 15:19:41 crc kubenswrapper[4869]: I1001 15:19:41.479202 4869 generic.go:334] "Generic (PLEG): container finished" podID="b685c639-4b3f-4597-8b0d-9f03283f18ed" containerID="f016e01d825d31393c24de728542ad4d00ece51da7b3df220722829a51122719" exitCode=0 Oct 01 15:19:41 crc kubenswrapper[4869]: I1001 15:19:41.479597 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-3b43-account-create-fgnrl" event={"ID":"b685c639-4b3f-4597-8b0d-9f03283f18ed","Type":"ContainerDied","Data":"f016e01d825d31393c24de728542ad4d00ece51da7b3df220722829a51122719"} Oct 01 15:19:42 crc kubenswrapper[4869]: I1001 15:19:42.240779 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-56977fbfb5-99lb5"] Oct 01 15:19:42 crc kubenswrapper[4869]: I1001 15:19:42.246510 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 01 15:19:42 crc kubenswrapper[4869]: I1001 15:19:42.279718 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-85845f7997-n9h7g"] Oct 01 15:19:42 crc kubenswrapper[4869]: I1001 15:19:42.280975 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-85845f7997-n9h7g" Oct 01 15:19:42 crc kubenswrapper[4869]: I1001 15:19:42.295927 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-85845f7997-n9h7g"] Oct 01 15:19:42 crc kubenswrapper[4869]: I1001 15:19:42.377405 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b141c51a-44cd-4c2c-be11-6c8b5576a289-logs\") pod \"horizon-85845f7997-n9h7g\" (UID: \"b141c51a-44cd-4c2c-be11-6c8b5576a289\") " pod="openstack/horizon-85845f7997-n9h7g" Oct 01 15:19:42 crc kubenswrapper[4869]: I1001 15:19:42.377546 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wnt2w\" (UniqueName: \"kubernetes.io/projected/b141c51a-44cd-4c2c-be11-6c8b5576a289-kube-api-access-wnt2w\") pod \"horizon-85845f7997-n9h7g\" (UID: \"b141c51a-44cd-4c2c-be11-6c8b5576a289\") " pod="openstack/horizon-85845f7997-n9h7g" Oct 01 15:19:42 crc kubenswrapper[4869]: I1001 15:19:42.377600 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b141c51a-44cd-4c2c-be11-6c8b5576a289-config-data\") pod \"horizon-85845f7997-n9h7g\" (UID: \"b141c51a-44cd-4c2c-be11-6c8b5576a289\") " pod="openstack/horizon-85845f7997-n9h7g" Oct 01 15:19:42 crc kubenswrapper[4869]: I1001 15:19:42.377783 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/b141c51a-44cd-4c2c-be11-6c8b5576a289-horizon-secret-key\") pod \"horizon-85845f7997-n9h7g\" (UID: \"b141c51a-44cd-4c2c-be11-6c8b5576a289\") " pod="openstack/horizon-85845f7997-n9h7g" Oct 01 15:19:42 crc kubenswrapper[4869]: I1001 15:19:42.377835 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b141c51a-44cd-4c2c-be11-6c8b5576a289-scripts\") pod \"horizon-85845f7997-n9h7g\" (UID: \"b141c51a-44cd-4c2c-be11-6c8b5576a289\") " pod="openstack/horizon-85845f7997-n9h7g" Oct 01 15:19:42 crc kubenswrapper[4869]: I1001 15:19:42.479465 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b141c51a-44cd-4c2c-be11-6c8b5576a289-logs\") pod \"horizon-85845f7997-n9h7g\" (UID: \"b141c51a-44cd-4c2c-be11-6c8b5576a289\") " pod="openstack/horizon-85845f7997-n9h7g" Oct 01 15:19:42 crc kubenswrapper[4869]: I1001 15:19:42.479523 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wnt2w\" (UniqueName: \"kubernetes.io/projected/b141c51a-44cd-4c2c-be11-6c8b5576a289-kube-api-access-wnt2w\") pod \"horizon-85845f7997-n9h7g\" (UID: \"b141c51a-44cd-4c2c-be11-6c8b5576a289\") " pod="openstack/horizon-85845f7997-n9h7g" Oct 01 15:19:42 crc kubenswrapper[4869]: I1001 15:19:42.479552 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b141c51a-44cd-4c2c-be11-6c8b5576a289-config-data\") pod \"horizon-85845f7997-n9h7g\" (UID: \"b141c51a-44cd-4c2c-be11-6c8b5576a289\") " pod="openstack/horizon-85845f7997-n9h7g" Oct 01 15:19:42 crc kubenswrapper[4869]: I1001 15:19:42.479612 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: 
\"kubernetes.io/secret/b141c51a-44cd-4c2c-be11-6c8b5576a289-horizon-secret-key\") pod \"horizon-85845f7997-n9h7g\" (UID: \"b141c51a-44cd-4c2c-be11-6c8b5576a289\") " pod="openstack/horizon-85845f7997-n9h7g" Oct 01 15:19:42 crc kubenswrapper[4869]: I1001 15:19:42.479636 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b141c51a-44cd-4c2c-be11-6c8b5576a289-scripts\") pod \"horizon-85845f7997-n9h7g\" (UID: \"b141c51a-44cd-4c2c-be11-6c8b5576a289\") " pod="openstack/horizon-85845f7997-n9h7g" Oct 01 15:19:42 crc kubenswrapper[4869]: I1001 15:19:42.480228 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b141c51a-44cd-4c2c-be11-6c8b5576a289-logs\") pod \"horizon-85845f7997-n9h7g\" (UID: \"b141c51a-44cd-4c2c-be11-6c8b5576a289\") " pod="openstack/horizon-85845f7997-n9h7g" Oct 01 15:19:42 crc kubenswrapper[4869]: I1001 15:19:42.480469 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b141c51a-44cd-4c2c-be11-6c8b5576a289-scripts\") pod \"horizon-85845f7997-n9h7g\" (UID: \"b141c51a-44cd-4c2c-be11-6c8b5576a289\") " pod="openstack/horizon-85845f7997-n9h7g" Oct 01 15:19:42 crc kubenswrapper[4869]: I1001 15:19:42.480876 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b141c51a-44cd-4c2c-be11-6c8b5576a289-config-data\") pod \"horizon-85845f7997-n9h7g\" (UID: \"b141c51a-44cd-4c2c-be11-6c8b5576a289\") " pod="openstack/horizon-85845f7997-n9h7g" Oct 01 15:19:42 crc kubenswrapper[4869]: I1001 15:19:42.486033 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/b141c51a-44cd-4c2c-be11-6c8b5576a289-horizon-secret-key\") pod \"horizon-85845f7997-n9h7g\" (UID: \"b141c51a-44cd-4c2c-be11-6c8b5576a289\") " pod="openstack/horizon-85845f7997-n9h7g" Oct 01 15:19:42 crc kubenswrapper[4869]: I1001 15:19:42.504425 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wnt2w\" (UniqueName: \"kubernetes.io/projected/b141c51a-44cd-4c2c-be11-6c8b5576a289-kube-api-access-wnt2w\") pod \"horizon-85845f7997-n9h7g\" (UID: \"b141c51a-44cd-4c2c-be11-6c8b5576a289\") " pod="openstack/horizon-85845f7997-n9h7g" Oct 01 15:19:42 crc kubenswrapper[4869]: I1001 15:19:42.511142 4869 generic.go:334] "Generic (PLEG): container finished" podID="e6e277cf-6fbb-46e6-96c2-004009b93faa" containerID="1f949349f800051a952b2fe8bbf88dacd5fa2e71449eab4d35a9a964ac60e4c6" exitCode=0 Oct 01 15:19:42 crc kubenswrapper[4869]: I1001 15:19:42.511214 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-fb9lk" event={"ID":"e6e277cf-6fbb-46e6-96c2-004009b93faa","Type":"ContainerDied","Data":"1f949349f800051a952b2fe8bbf88dacd5fa2e71449eab4d35a9a964ac60e4c6"} Oct 01 15:19:42 crc kubenswrapper[4869]: I1001 15:19:42.601511 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-85845f7997-n9h7g" Oct 01 15:19:43 crc kubenswrapper[4869]: I1001 15:19:43.353885 4869 patch_prober.go:28] interesting pod/machine-config-daemon-c86m8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 15:19:43 crc kubenswrapper[4869]: I1001 15:19:43.354327 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 15:19:47 crc kubenswrapper[4869]: I1001 15:19:47.593853 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-d6669bb45-npxjb" Oct 01 15:19:47 crc kubenswrapper[4869]: I1001 15:19:47.661010 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-86ddb7fb65-5th7v"] Oct 01 15:19:47 crc kubenswrapper[4869]: I1001 15:19:47.662823 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-86ddb7fb65-5th7v" podUID="298c162a-4d72-4d7c-bd59-d5d5c8f7cf7f" containerName="dnsmasq-dns" containerID="cri-o://261d9771c3e0c08c4b3e742d5b6028f0b71083327f06a52fbf4b771d3c082ac0" gracePeriod=10 Oct 01 15:19:48 crc kubenswrapper[4869]: I1001 15:19:48.567789 4869 generic.go:334] "Generic (PLEG): container finished" podID="298c162a-4d72-4d7c-bd59-d5d5c8f7cf7f" containerID="261d9771c3e0c08c4b3e742d5b6028f0b71083327f06a52fbf4b771d3c082ac0" exitCode=0 Oct 01 15:19:48 crc kubenswrapper[4869]: I1001 15:19:48.567914 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86ddb7fb65-5th7v" event={"ID":"298c162a-4d72-4d7c-bd59-d5d5c8f7cf7f","Type":"ContainerDied","Data":"261d9771c3e0c08c4b3e742d5b6028f0b71083327f06a52fbf4b771d3c082ac0"} Oct 01 15:19:48 crc kubenswrapper[4869]: I1001 15:19:48.820803 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-695b54fb65-rp2zz"] Oct 01 15:19:48 crc kubenswrapper[4869]: I1001 15:19:48.865614 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-5766b74c9d-wpxpf"] Oct 01 15:19:48 crc kubenswrapper[4869]: I1001 15:19:48.871314 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-5766b74c9d-wpxpf" Oct 01 15:19:48 crc kubenswrapper[4869]: I1001 15:19:48.876239 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-horizon-svc" Oct 01 15:19:48 crc kubenswrapper[4869]: I1001 15:19:48.885334 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-5766b74c9d-wpxpf"] Oct 01 15:19:48 crc kubenswrapper[4869]: I1001 15:19:48.893991 4869 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-86ddb7fb65-5th7v" podUID="298c162a-4d72-4d7c-bd59-d5d5c8f7cf7f" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.113:5353: connect: connection refused" Oct 01 15:19:48 crc kubenswrapper[4869]: I1001 15:19:48.921102 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-85845f7997-n9h7g"] Oct 01 15:19:48 crc kubenswrapper[4869]: I1001 15:19:48.937991 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-5f66f6967d-mnbqz"] Oct 01 15:19:48 crc kubenswrapper[4869]: I1001 15:19:48.939245 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-5f66f6967d-mnbqz" Oct 01 15:19:48 crc kubenswrapper[4869]: I1001 15:19:48.957235 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-5f66f6967d-mnbqz"] Oct 01 15:19:48 crc kubenswrapper[4869]: I1001 15:19:48.998446 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf-combined-ca-bundle\") pod \"horizon-5766b74c9d-wpxpf\" (UID: \"011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf\") " pod="openstack/horizon-5766b74c9d-wpxpf" Oct 01 15:19:48 crc kubenswrapper[4869]: I1001 15:19:48.998551 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf-logs\") pod \"horizon-5766b74c9d-wpxpf\" (UID: \"011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf\") " pod="openstack/horizon-5766b74c9d-wpxpf" Oct 01 15:19:48 crc kubenswrapper[4869]: I1001 15:19:48.998588 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf-config-data\") pod \"horizon-5766b74c9d-wpxpf\" (UID: \"011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf\") " pod="openstack/horizon-5766b74c9d-wpxpf" Oct 01 15:19:48 crc kubenswrapper[4869]: I1001 15:19:48.998618 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf-scripts\") pod \"horizon-5766b74c9d-wpxpf\" (UID: \"011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf\") " pod="openstack/horizon-5766b74c9d-wpxpf" Oct 01 15:19:48 crc kubenswrapper[4869]: I1001 15:19:48.998688 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf-horizon-secret-key\") pod \"horizon-5766b74c9d-wpxpf\" (UID: \"011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf\") " pod="openstack/horizon-5766b74c9d-wpxpf" Oct 01 15:19:48 crc kubenswrapper[4869]: I1001 15:19:48.998767 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf-horizon-tls-certs\") pod \"horizon-5766b74c9d-wpxpf\" (UID: \"011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf\") " pod="openstack/horizon-5766b74c9d-wpxpf" Oct 01 15:19:48 crc kubenswrapper[4869]: I1001 15:19:48.998800 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pq6dl\" (UniqueName: \"kubernetes.io/projected/011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf-kube-api-access-pq6dl\") pod \"horizon-5766b74c9d-wpxpf\" (UID: \"011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf\") " pod="openstack/horizon-5766b74c9d-wpxpf" Oct 01 15:19:49 crc kubenswrapper[4869]: I1001 15:19:49.099959 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf-combined-ca-bundle\") pod \"horizon-5766b74c9d-wpxpf\" (UID: \"011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf\") " pod="openstack/horizon-5766b74c9d-wpxpf" Oct 01 15:19:49 crc kubenswrapper[4869]: I1001 15:19:49.100015 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/eb62e045-ca51-4b33-a63d-9c53b247cc91-scripts\") pod \"horizon-5f66f6967d-mnbqz\" (UID: \"eb62e045-ca51-4b33-a63d-9c53b247cc91\") " pod="openstack/horizon-5f66f6967d-mnbqz" Oct 01 15:19:49 crc kubenswrapper[4869]: I1001 15:19:49.100060 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/eb62e045-ca51-4b33-a63d-9c53b247cc91-horizon-tls-certs\") pod \"horizon-5f66f6967d-mnbqz\" (UID: \"eb62e045-ca51-4b33-a63d-9c53b247cc91\") " pod="openstack/horizon-5f66f6967d-mnbqz" Oct 01 15:19:49 crc kubenswrapper[4869]: I1001 15:19:49.100082 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf-logs\") pod \"horizon-5766b74c9d-wpxpf\" (UID: \"011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf\") " pod="openstack/horizon-5766b74c9d-wpxpf" Oct 01 15:19:49 crc kubenswrapper[4869]: I1001 15:19:49.100104 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf-config-data\") pod \"horizon-5766b74c9d-wpxpf\" (UID: \"011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf\") " pod="openstack/horizon-5766b74c9d-wpxpf" Oct 01 15:19:49 crc kubenswrapper[4869]: I1001 15:19:49.100121 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/eb62e045-ca51-4b33-a63d-9c53b247cc91-horizon-secret-key\") pod \"horizon-5f66f6967d-mnbqz\" (UID: \"eb62e045-ca51-4b33-a63d-9c53b247cc91\") " pod="openstack/horizon-5f66f6967d-mnbqz" Oct 01 15:19:49 crc kubenswrapper[4869]: I1001 15:19:49.100170 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf-scripts\") pod \"horizon-5766b74c9d-wpxpf\" (UID: \"011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf\") " pod="openstack/horizon-5766b74c9d-wpxpf" Oct 01 15:19:49 crc kubenswrapper[4869]: I1001 15:19:49.100225 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f4x5f\" (UniqueName: 
\"kubernetes.io/projected/eb62e045-ca51-4b33-a63d-9c53b247cc91-kube-api-access-f4x5f\") pod \"horizon-5f66f6967d-mnbqz\" (UID: \"eb62e045-ca51-4b33-a63d-9c53b247cc91\") " pod="openstack/horizon-5f66f6967d-mnbqz" Oct 01 15:19:49 crc kubenswrapper[4869]: I1001 15:19:49.100269 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf-horizon-secret-key\") pod \"horizon-5766b74c9d-wpxpf\" (UID: \"011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf\") " pod="openstack/horizon-5766b74c9d-wpxpf" Oct 01 15:19:49 crc kubenswrapper[4869]: I1001 15:19:49.100306 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/eb62e045-ca51-4b33-a63d-9c53b247cc91-config-data\") pod \"horizon-5f66f6967d-mnbqz\" (UID: \"eb62e045-ca51-4b33-a63d-9c53b247cc91\") " pod="openstack/horizon-5f66f6967d-mnbqz" Oct 01 15:19:49 crc kubenswrapper[4869]: I1001 15:19:49.100331 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/eb62e045-ca51-4b33-a63d-9c53b247cc91-logs\") pod \"horizon-5f66f6967d-mnbqz\" (UID: \"eb62e045-ca51-4b33-a63d-9c53b247cc91\") " pod="openstack/horizon-5f66f6967d-mnbqz" Oct 01 15:19:49 crc kubenswrapper[4869]: I1001 15:19:49.100347 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf-horizon-tls-certs\") pod \"horizon-5766b74c9d-wpxpf\" (UID: \"011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf\") " pod="openstack/horizon-5766b74c9d-wpxpf" Oct 01 15:19:49 crc kubenswrapper[4869]: I1001 15:19:49.100371 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pq6dl\" (UniqueName: \"kubernetes.io/projected/011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf-kube-api-access-pq6dl\") pod \"horizon-5766b74c9d-wpxpf\" (UID: \"011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf\") " pod="openstack/horizon-5766b74c9d-wpxpf" Oct 01 15:19:49 crc kubenswrapper[4869]: I1001 15:19:49.100391 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb62e045-ca51-4b33-a63d-9c53b247cc91-combined-ca-bundle\") pod \"horizon-5f66f6967d-mnbqz\" (UID: \"eb62e045-ca51-4b33-a63d-9c53b247cc91\") " pod="openstack/horizon-5f66f6967d-mnbqz" Oct 01 15:19:49 crc kubenswrapper[4869]: I1001 15:19:49.101414 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf-logs\") pod \"horizon-5766b74c9d-wpxpf\" (UID: \"011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf\") " pod="openstack/horizon-5766b74c9d-wpxpf" Oct 01 15:19:49 crc kubenswrapper[4869]: I1001 15:19:49.102042 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf-scripts\") pod \"horizon-5766b74c9d-wpxpf\" (UID: \"011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf\") " pod="openstack/horizon-5766b74c9d-wpxpf" Oct 01 15:19:49 crc kubenswrapper[4869]: I1001 15:19:49.103570 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf-config-data\") pod \"horizon-5766b74c9d-wpxpf\" (UID: 
\"011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf\") " pod="openstack/horizon-5766b74c9d-wpxpf" Oct 01 15:19:49 crc kubenswrapper[4869]: I1001 15:19:49.108602 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf-horizon-secret-key\") pod \"horizon-5766b74c9d-wpxpf\" (UID: \"011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf\") " pod="openstack/horizon-5766b74c9d-wpxpf" Oct 01 15:19:49 crc kubenswrapper[4869]: I1001 15:19:49.108658 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf-horizon-tls-certs\") pod \"horizon-5766b74c9d-wpxpf\" (UID: \"011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf\") " pod="openstack/horizon-5766b74c9d-wpxpf" Oct 01 15:19:49 crc kubenswrapper[4869]: I1001 15:19:49.114878 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf-combined-ca-bundle\") pod \"horizon-5766b74c9d-wpxpf\" (UID: \"011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf\") " pod="openstack/horizon-5766b74c9d-wpxpf" Oct 01 15:19:49 crc kubenswrapper[4869]: I1001 15:19:49.118276 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pq6dl\" (UniqueName: \"kubernetes.io/projected/011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf-kube-api-access-pq6dl\") pod \"horizon-5766b74c9d-wpxpf\" (UID: \"011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf\") " pod="openstack/horizon-5766b74c9d-wpxpf" Oct 01 15:19:49 crc kubenswrapper[4869]: I1001 15:19:49.202240 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/eb62e045-ca51-4b33-a63d-9c53b247cc91-horizon-tls-certs\") pod \"horizon-5f66f6967d-mnbqz\" (UID: \"eb62e045-ca51-4b33-a63d-9c53b247cc91\") " pod="openstack/horizon-5f66f6967d-mnbqz" Oct 01 15:19:49 crc kubenswrapper[4869]: I1001 15:19:49.202304 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/eb62e045-ca51-4b33-a63d-9c53b247cc91-horizon-secret-key\") pod \"horizon-5f66f6967d-mnbqz\" (UID: \"eb62e045-ca51-4b33-a63d-9c53b247cc91\") " pod="openstack/horizon-5f66f6967d-mnbqz" Oct 01 15:19:49 crc kubenswrapper[4869]: I1001 15:19:49.202339 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f4x5f\" (UniqueName: \"kubernetes.io/projected/eb62e045-ca51-4b33-a63d-9c53b247cc91-kube-api-access-f4x5f\") pod \"horizon-5f66f6967d-mnbqz\" (UID: \"eb62e045-ca51-4b33-a63d-9c53b247cc91\") " pod="openstack/horizon-5f66f6967d-mnbqz" Oct 01 15:19:49 crc kubenswrapper[4869]: I1001 15:19:49.202391 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/eb62e045-ca51-4b33-a63d-9c53b247cc91-config-data\") pod \"horizon-5f66f6967d-mnbqz\" (UID: \"eb62e045-ca51-4b33-a63d-9c53b247cc91\") " pod="openstack/horizon-5f66f6967d-mnbqz" Oct 01 15:19:49 crc kubenswrapper[4869]: I1001 15:19:49.202418 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/eb62e045-ca51-4b33-a63d-9c53b247cc91-logs\") pod \"horizon-5f66f6967d-mnbqz\" (UID: \"eb62e045-ca51-4b33-a63d-9c53b247cc91\") " pod="openstack/horizon-5f66f6967d-mnbqz" Oct 01 15:19:49 crc kubenswrapper[4869]: I1001 
15:19:49.202445 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb62e045-ca51-4b33-a63d-9c53b247cc91-combined-ca-bundle\") pod \"horizon-5f66f6967d-mnbqz\" (UID: \"eb62e045-ca51-4b33-a63d-9c53b247cc91\") " pod="openstack/horizon-5f66f6967d-mnbqz" Oct 01 15:19:49 crc kubenswrapper[4869]: I1001 15:19:49.202483 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/eb62e045-ca51-4b33-a63d-9c53b247cc91-scripts\") pod \"horizon-5f66f6967d-mnbqz\" (UID: \"eb62e045-ca51-4b33-a63d-9c53b247cc91\") " pod="openstack/horizon-5f66f6967d-mnbqz" Oct 01 15:19:49 crc kubenswrapper[4869]: I1001 15:19:49.204559 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/eb62e045-ca51-4b33-a63d-9c53b247cc91-logs\") pod \"horizon-5f66f6967d-mnbqz\" (UID: \"eb62e045-ca51-4b33-a63d-9c53b247cc91\") " pod="openstack/horizon-5f66f6967d-mnbqz" Oct 01 15:19:49 crc kubenswrapper[4869]: I1001 15:19:49.204831 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/eb62e045-ca51-4b33-a63d-9c53b247cc91-scripts\") pod \"horizon-5f66f6967d-mnbqz\" (UID: \"eb62e045-ca51-4b33-a63d-9c53b247cc91\") " pod="openstack/horizon-5f66f6967d-mnbqz" Oct 01 15:19:49 crc kubenswrapper[4869]: I1001 15:19:49.205802 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/eb62e045-ca51-4b33-a63d-9c53b247cc91-config-data\") pod \"horizon-5f66f6967d-mnbqz\" (UID: \"eb62e045-ca51-4b33-a63d-9c53b247cc91\") " pod="openstack/horizon-5f66f6967d-mnbqz" Oct 01 15:19:49 crc kubenswrapper[4869]: I1001 15:19:49.207828 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb62e045-ca51-4b33-a63d-9c53b247cc91-combined-ca-bundle\") pod \"horizon-5f66f6967d-mnbqz\" (UID: \"eb62e045-ca51-4b33-a63d-9c53b247cc91\") " pod="openstack/horizon-5f66f6967d-mnbqz" Oct 01 15:19:49 crc kubenswrapper[4869]: I1001 15:19:49.207952 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/eb62e045-ca51-4b33-a63d-9c53b247cc91-horizon-secret-key\") pod \"horizon-5f66f6967d-mnbqz\" (UID: \"eb62e045-ca51-4b33-a63d-9c53b247cc91\") " pod="openstack/horizon-5f66f6967d-mnbqz" Oct 01 15:19:49 crc kubenswrapper[4869]: I1001 15:19:49.210777 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-5766b74c9d-wpxpf" Oct 01 15:19:49 crc kubenswrapper[4869]: I1001 15:19:49.212891 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/eb62e045-ca51-4b33-a63d-9c53b247cc91-horizon-tls-certs\") pod \"horizon-5f66f6967d-mnbqz\" (UID: \"eb62e045-ca51-4b33-a63d-9c53b247cc91\") " pod="openstack/horizon-5f66f6967d-mnbqz" Oct 01 15:19:49 crc kubenswrapper[4869]: I1001 15:19:49.223049 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f4x5f\" (UniqueName: \"kubernetes.io/projected/eb62e045-ca51-4b33-a63d-9c53b247cc91-kube-api-access-f4x5f\") pod \"horizon-5f66f6967d-mnbqz\" (UID: \"eb62e045-ca51-4b33-a63d-9c53b247cc91\") " pod="openstack/horizon-5f66f6967d-mnbqz" Oct 01 15:19:49 crc kubenswrapper[4869]: I1001 15:19:49.264396 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-5f66f6967d-mnbqz" Oct 01 15:19:53 crc kubenswrapper[4869]: I1001 15:19:53.894141 4869 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-86ddb7fb65-5th7v" podUID="298c162a-4d72-4d7c-bd59-d5d5c8f7cf7f" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.113:5353: connect: connection refused" Oct 01 15:19:58 crc kubenswrapper[4869]: E1001 15:19:58.301122 4869 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-horizon@sha256:9470db6caf5102cf37ddb1f137f17b05ef7119f174f4189beb4839ef7f65730c" Oct 01 15:19:58 crc kubenswrapper[4869]: E1001 15:19:58.302226 4869 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:horizon-log,Image:quay.io/podified-antelope-centos9/openstack-horizon@sha256:9470db6caf5102cf37ddb1f137f17b05ef7119f174f4189beb4839ef7f65730c,Command:[/bin/bash],Args:[-c tail -n+1 -F 
/var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n664h547h5bbh569hcbh677h565h586h9bh587h96h8dhc4h57ch658hf6h688h56ch85h8dh55h68bh5bdh96h85h698h5d6h8bh55h686hbh58cq,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:no,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-zzw47,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-56977fbfb5-99lb5_openstack(84004c40-05d7-4037-b3c3-748ad141fcf7): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 01 15:19:58 crc kubenswrapper[4869]: E1001 15:19:58.305744 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon@sha256:9470db6caf5102cf37ddb1f137f17b05ef7119f174f4189beb4839ef7f65730c\\\"\"]" pod="openstack/horizon-56977fbfb5-99lb5" podUID="84004c40-05d7-4037-b3c3-748ad141fcf7" Oct 01 15:19:58 crc kubenswrapper[4869]: E1001 15:19:58.367594 4869 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-horizon@sha256:9470db6caf5102cf37ddb1f137f17b05ef7119f174f4189beb4839ef7f65730c" Oct 01 15:19:58 crc kubenswrapper[4869]: E1001 15:19:58.367897 4869 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:horizon-log,Image:quay.io/podified-antelope-centos9/openstack-horizon@sha256:9470db6caf5102cf37ddb1f137f17b05ef7119f174f4189beb4839ef7f65730c,Command:[/bin/bash],Args:[-c tail -n+1 -F 
/var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n5dch54bh66bh85h5cfh5f6h59h8dh67fhf5h547hcbh674h55dh87h65ch689h596h686h5b5h54ch65fh55dh687h696h76hd8h698h576h57bh549hf9q,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:no,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-dgr8s,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-695b54fb65-rp2zz_openstack(abaaae88-543a-4735-a3e2-08978a450647): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 01 15:19:58 crc kubenswrapper[4869]: E1001 15:19:58.370588 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon@sha256:9470db6caf5102cf37ddb1f137f17b05ef7119f174f4189beb4839ef7f65730c\\\"\"]" pod="openstack/horizon-695b54fb65-rp2zz" podUID="abaaae88-543a-4735-a3e2-08978a450647" Oct 01 15:19:58 crc kubenswrapper[4869]: I1001 15:19:58.435285 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-3b43-account-create-fgnrl" Oct 01 15:19:58 crc kubenswrapper[4869]: I1001 15:19:58.443437 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-fb9lk" Oct 01 15:19:58 crc kubenswrapper[4869]: I1001 15:19:58.447726 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-6b4b-account-create-8zk2q" Oct 01 15:19:58 crc kubenswrapper[4869]: I1001 15:19:58.591761 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e6e277cf-6fbb-46e6-96c2-004009b93faa-scripts\") pod \"e6e277cf-6fbb-46e6-96c2-004009b93faa\" (UID: \"e6e277cf-6fbb-46e6-96c2-004009b93faa\") " Oct 01 15:19:58 crc kubenswrapper[4869]: I1001 15:19:58.591833 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/e6e277cf-6fbb-46e6-96c2-004009b93faa-credential-keys\") pod \"e6e277cf-6fbb-46e6-96c2-004009b93faa\" (UID: \"e6e277cf-6fbb-46e6-96c2-004009b93faa\") " Oct 01 15:19:58 crc kubenswrapper[4869]: I1001 15:19:58.591870 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k4nzc\" (UniqueName: \"kubernetes.io/projected/b685c639-4b3f-4597-8b0d-9f03283f18ed-kube-api-access-k4nzc\") pod \"b685c639-4b3f-4597-8b0d-9f03283f18ed\" (UID: \"b685c639-4b3f-4597-8b0d-9f03283f18ed\") " Oct 01 15:19:58 crc kubenswrapper[4869]: I1001 15:19:58.591990 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/e6e277cf-6fbb-46e6-96c2-004009b93faa-fernet-keys\") pod \"e6e277cf-6fbb-46e6-96c2-004009b93faa\" (UID: \"e6e277cf-6fbb-46e6-96c2-004009b93faa\") " Oct 01 15:19:58 crc kubenswrapper[4869]: I1001 15:19:58.592024 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e6e277cf-6fbb-46e6-96c2-004009b93faa-combined-ca-bundle\") pod \"e6e277cf-6fbb-46e6-96c2-004009b93faa\" (UID: \"e6e277cf-6fbb-46e6-96c2-004009b93faa\") " Oct 01 15:19:58 crc kubenswrapper[4869]: I1001 15:19:58.592075 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e6e277cf-6fbb-46e6-96c2-004009b93faa-config-data\") pod \"e6e277cf-6fbb-46e6-96c2-004009b93faa\" (UID: \"e6e277cf-6fbb-46e6-96c2-004009b93faa\") " Oct 01 15:19:58 crc kubenswrapper[4869]: I1001 15:19:58.592121 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q999b\" (UniqueName: \"kubernetes.io/projected/40fc21e1-9cc1-42cb-84ae-a5edde36cefd-kube-api-access-q999b\") pod \"40fc21e1-9cc1-42cb-84ae-a5edde36cefd\" (UID: \"40fc21e1-9cc1-42cb-84ae-a5edde36cefd\") " Oct 01 15:19:58 crc kubenswrapper[4869]: I1001 15:19:58.592186 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l4hn8\" (UniqueName: \"kubernetes.io/projected/e6e277cf-6fbb-46e6-96c2-004009b93faa-kube-api-access-l4hn8\") pod \"e6e277cf-6fbb-46e6-96c2-004009b93faa\" (UID: \"e6e277cf-6fbb-46e6-96c2-004009b93faa\") " Oct 01 15:19:58 crc kubenswrapper[4869]: I1001 15:19:58.597705 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b685c639-4b3f-4597-8b0d-9f03283f18ed-kube-api-access-k4nzc" (OuterVolumeSpecName: "kube-api-access-k4nzc") pod "b685c639-4b3f-4597-8b0d-9f03283f18ed" (UID: "b685c639-4b3f-4597-8b0d-9f03283f18ed"). InnerVolumeSpecName "kube-api-access-k4nzc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:19:58 crc kubenswrapper[4869]: I1001 15:19:58.598566 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e6e277cf-6fbb-46e6-96c2-004009b93faa-kube-api-access-l4hn8" (OuterVolumeSpecName: "kube-api-access-l4hn8") pod "e6e277cf-6fbb-46e6-96c2-004009b93faa" (UID: "e6e277cf-6fbb-46e6-96c2-004009b93faa"). InnerVolumeSpecName "kube-api-access-l4hn8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:19:58 crc kubenswrapper[4869]: I1001 15:19:58.598607 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/40fc21e1-9cc1-42cb-84ae-a5edde36cefd-kube-api-access-q999b" (OuterVolumeSpecName: "kube-api-access-q999b") pod "40fc21e1-9cc1-42cb-84ae-a5edde36cefd" (UID: "40fc21e1-9cc1-42cb-84ae-a5edde36cefd"). InnerVolumeSpecName "kube-api-access-q999b". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:19:58 crc kubenswrapper[4869]: I1001 15:19:58.598686 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e6e277cf-6fbb-46e6-96c2-004009b93faa-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "e6e277cf-6fbb-46e6-96c2-004009b93faa" (UID: "e6e277cf-6fbb-46e6-96c2-004009b93faa"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:19:58 crc kubenswrapper[4869]: I1001 15:19:58.599156 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e6e277cf-6fbb-46e6-96c2-004009b93faa-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "e6e277cf-6fbb-46e6-96c2-004009b93faa" (UID: "e6e277cf-6fbb-46e6-96c2-004009b93faa"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:19:58 crc kubenswrapper[4869]: I1001 15:19:58.614065 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e6e277cf-6fbb-46e6-96c2-004009b93faa-scripts" (OuterVolumeSpecName: "scripts") pod "e6e277cf-6fbb-46e6-96c2-004009b93faa" (UID: "e6e277cf-6fbb-46e6-96c2-004009b93faa"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:19:58 crc kubenswrapper[4869]: I1001 15:19:58.624726 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e6e277cf-6fbb-46e6-96c2-004009b93faa-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e6e277cf-6fbb-46e6-96c2-004009b93faa" (UID: "e6e277cf-6fbb-46e6-96c2-004009b93faa"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:19:58 crc kubenswrapper[4869]: I1001 15:19:58.644250 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e6e277cf-6fbb-46e6-96c2-004009b93faa-config-data" (OuterVolumeSpecName: "config-data") pod "e6e277cf-6fbb-46e6-96c2-004009b93faa" (UID: "e6e277cf-6fbb-46e6-96c2-004009b93faa"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:19:58 crc kubenswrapper[4869]: I1001 15:19:58.673306 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-fb9lk" event={"ID":"e6e277cf-6fbb-46e6-96c2-004009b93faa","Type":"ContainerDied","Data":"e842449260587100a1eeded7a1f24f60571f09ed20946e70e30c7b23099d619e"} Oct 01 15:19:58 crc kubenswrapper[4869]: I1001 15:19:58.673341 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e842449260587100a1eeded7a1f24f60571f09ed20946e70e30c7b23099d619e" Oct 01 15:19:58 crc kubenswrapper[4869]: I1001 15:19:58.673362 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-fb9lk" Oct 01 15:19:58 crc kubenswrapper[4869]: I1001 15:19:58.674946 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-6b4b-account-create-8zk2q" event={"ID":"40fc21e1-9cc1-42cb-84ae-a5edde36cefd","Type":"ContainerDied","Data":"9e9686e1f6fbc80090960ca859a789a0cf99a7c9a15830afed4616856ab5a4fa"} Oct 01 15:19:58 crc kubenswrapper[4869]: I1001 15:19:58.674966 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9e9686e1f6fbc80090960ca859a789a0cf99a7c9a15830afed4616856ab5a4fa" Oct 01 15:19:58 crc kubenswrapper[4869]: I1001 15:19:58.675015 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-6b4b-account-create-8zk2q" Oct 01 15:19:58 crc kubenswrapper[4869]: I1001 15:19:58.677016 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-3b43-account-create-fgnrl" event={"ID":"b685c639-4b3f-4597-8b0d-9f03283f18ed","Type":"ContainerDied","Data":"1183957ca0c521e2928a6e04f618bf68813ad753f15766c04df6a6006d5fc112"} Oct 01 15:19:58 crc kubenswrapper[4869]: I1001 15:19:58.677101 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-3b43-account-create-fgnrl" Oct 01 15:19:58 crc kubenswrapper[4869]: I1001 15:19:58.677126 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1183957ca0c521e2928a6e04f618bf68813ad753f15766c04df6a6006d5fc112" Oct 01 15:19:58 crc kubenswrapper[4869]: I1001 15:19:58.693783 4869 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/e6e277cf-6fbb-46e6-96c2-004009b93faa-fernet-keys\") on node \"crc\" DevicePath \"\"" Oct 01 15:19:58 crc kubenswrapper[4869]: I1001 15:19:58.693810 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e6e277cf-6fbb-46e6-96c2-004009b93faa-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 15:19:58 crc kubenswrapper[4869]: I1001 15:19:58.693819 4869 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e6e277cf-6fbb-46e6-96c2-004009b93faa-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 15:19:58 crc kubenswrapper[4869]: I1001 15:19:58.693828 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q999b\" (UniqueName: \"kubernetes.io/projected/40fc21e1-9cc1-42cb-84ae-a5edde36cefd-kube-api-access-q999b\") on node \"crc\" DevicePath \"\"" Oct 01 15:19:58 crc kubenswrapper[4869]: I1001 15:19:58.693838 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l4hn8\" (UniqueName: \"kubernetes.io/projected/e6e277cf-6fbb-46e6-96c2-004009b93faa-kube-api-access-l4hn8\") on node \"crc\" DevicePath \"\"" Oct 01 15:19:58 crc kubenswrapper[4869]: I1001 15:19:58.693846 4869 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e6e277cf-6fbb-46e6-96c2-004009b93faa-scripts\") on node \"crc\" DevicePath \"\"" Oct 01 15:19:58 crc kubenswrapper[4869]: I1001 15:19:58.693855 4869 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/e6e277cf-6fbb-46e6-96c2-004009b93faa-credential-keys\") on node \"crc\" DevicePath \"\"" Oct 01 15:19:58 crc kubenswrapper[4869]: I1001 15:19:58.693863 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k4nzc\" (UniqueName: \"kubernetes.io/projected/b685c639-4b3f-4597-8b0d-9f03283f18ed-kube-api-access-k4nzc\") on node \"crc\" DevicePath \"\"" Oct 01 15:19:58 crc kubenswrapper[4869]: I1001 15:19:58.894883 4869 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-86ddb7fb65-5th7v" podUID="298c162a-4d72-4d7c-bd59-d5d5c8f7cf7f" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.113:5353: connect: connection refused" Oct 01 15:19:58 crc kubenswrapper[4869]: I1001 15:19:58.895003 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-86ddb7fb65-5th7v" Oct 01 15:19:59 crc kubenswrapper[4869]: I1001 15:19:59.543432 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-fb9lk"] Oct 01 15:19:59 crc kubenswrapper[4869]: I1001 15:19:59.551471 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-fb9lk"] Oct 01 15:19:59 crc kubenswrapper[4869]: I1001 15:19:59.593033 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e6e277cf-6fbb-46e6-96c2-004009b93faa" path="/var/lib/kubelet/pods/e6e277cf-6fbb-46e6-96c2-004009b93faa/volumes" Oct 01 15:19:59 crc 
kubenswrapper[4869]: I1001 15:19:59.629278 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-24mhh"] Oct 01 15:19:59 crc kubenswrapper[4869]: E1001 15:19:59.629645 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e6e277cf-6fbb-46e6-96c2-004009b93faa" containerName="keystone-bootstrap" Oct 01 15:19:59 crc kubenswrapper[4869]: I1001 15:19:59.629661 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="e6e277cf-6fbb-46e6-96c2-004009b93faa" containerName="keystone-bootstrap" Oct 01 15:19:59 crc kubenswrapper[4869]: E1001 15:19:59.629682 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="40fc21e1-9cc1-42cb-84ae-a5edde36cefd" containerName="mariadb-account-create" Oct 01 15:19:59 crc kubenswrapper[4869]: I1001 15:19:59.629688 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="40fc21e1-9cc1-42cb-84ae-a5edde36cefd" containerName="mariadb-account-create" Oct 01 15:19:59 crc kubenswrapper[4869]: E1001 15:19:59.629699 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b685c639-4b3f-4597-8b0d-9f03283f18ed" containerName="mariadb-account-create" Oct 01 15:19:59 crc kubenswrapper[4869]: I1001 15:19:59.629704 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="b685c639-4b3f-4597-8b0d-9f03283f18ed" containerName="mariadb-account-create" Oct 01 15:19:59 crc kubenswrapper[4869]: I1001 15:19:59.629872 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="b685c639-4b3f-4597-8b0d-9f03283f18ed" containerName="mariadb-account-create" Oct 01 15:19:59 crc kubenswrapper[4869]: I1001 15:19:59.629882 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="e6e277cf-6fbb-46e6-96c2-004009b93faa" containerName="keystone-bootstrap" Oct 01 15:19:59 crc kubenswrapper[4869]: I1001 15:19:59.629896 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="40fc21e1-9cc1-42cb-84ae-a5edde36cefd" containerName="mariadb-account-create" Oct 01 15:19:59 crc kubenswrapper[4869]: I1001 15:19:59.630528 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-24mhh" Oct 01 15:19:59 crc kubenswrapper[4869]: I1001 15:19:59.634866 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Oct 01 15:19:59 crc kubenswrapper[4869]: I1001 15:19:59.635094 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Oct 01 15:19:59 crc kubenswrapper[4869]: I1001 15:19:59.635352 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-vhhr5" Oct 01 15:19:59 crc kubenswrapper[4869]: I1001 15:19:59.637399 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Oct 01 15:19:59 crc kubenswrapper[4869]: I1001 15:19:59.637719 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-24mhh"] Oct 01 15:19:59 crc kubenswrapper[4869]: I1001 15:19:59.715186 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/0d6f159a-60ec-48de-87f6-d676877278c6-fernet-keys\") pod \"keystone-bootstrap-24mhh\" (UID: \"0d6f159a-60ec-48de-87f6-d676877278c6\") " pod="openstack/keystone-bootstrap-24mhh" Oct 01 15:19:59 crc kubenswrapper[4869]: I1001 15:19:59.715358 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d6f159a-60ec-48de-87f6-d676877278c6-config-data\") pod \"keystone-bootstrap-24mhh\" (UID: \"0d6f159a-60ec-48de-87f6-d676877278c6\") " pod="openstack/keystone-bootstrap-24mhh" Oct 01 15:19:59 crc kubenswrapper[4869]: I1001 15:19:59.715476 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/0d6f159a-60ec-48de-87f6-d676877278c6-credential-keys\") pod \"keystone-bootstrap-24mhh\" (UID: \"0d6f159a-60ec-48de-87f6-d676877278c6\") " pod="openstack/keystone-bootstrap-24mhh" Oct 01 15:19:59 crc kubenswrapper[4869]: I1001 15:19:59.715518 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0d6f159a-60ec-48de-87f6-d676877278c6-scripts\") pod \"keystone-bootstrap-24mhh\" (UID: \"0d6f159a-60ec-48de-87f6-d676877278c6\") " pod="openstack/keystone-bootstrap-24mhh" Oct 01 15:19:59 crc kubenswrapper[4869]: I1001 15:19:59.715661 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hp6vj\" (UniqueName: \"kubernetes.io/projected/0d6f159a-60ec-48de-87f6-d676877278c6-kube-api-access-hp6vj\") pod \"keystone-bootstrap-24mhh\" (UID: \"0d6f159a-60ec-48de-87f6-d676877278c6\") " pod="openstack/keystone-bootstrap-24mhh" Oct 01 15:19:59 crc kubenswrapper[4869]: I1001 15:19:59.715840 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d6f159a-60ec-48de-87f6-d676877278c6-combined-ca-bundle\") pod \"keystone-bootstrap-24mhh\" (UID: \"0d6f159a-60ec-48de-87f6-d676877278c6\") " pod="openstack/keystone-bootstrap-24mhh" Oct 01 15:19:59 crc kubenswrapper[4869]: I1001 15:19:59.817628 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hp6vj\" (UniqueName: \"kubernetes.io/projected/0d6f159a-60ec-48de-87f6-d676877278c6-kube-api-access-hp6vj\") pod 
\"keystone-bootstrap-24mhh\" (UID: \"0d6f159a-60ec-48de-87f6-d676877278c6\") " pod="openstack/keystone-bootstrap-24mhh" Oct 01 15:19:59 crc kubenswrapper[4869]: I1001 15:19:59.817719 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d6f159a-60ec-48de-87f6-d676877278c6-combined-ca-bundle\") pod \"keystone-bootstrap-24mhh\" (UID: \"0d6f159a-60ec-48de-87f6-d676877278c6\") " pod="openstack/keystone-bootstrap-24mhh" Oct 01 15:19:59 crc kubenswrapper[4869]: I1001 15:19:59.817751 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/0d6f159a-60ec-48de-87f6-d676877278c6-fernet-keys\") pod \"keystone-bootstrap-24mhh\" (UID: \"0d6f159a-60ec-48de-87f6-d676877278c6\") " pod="openstack/keystone-bootstrap-24mhh" Oct 01 15:19:59 crc kubenswrapper[4869]: I1001 15:19:59.817780 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d6f159a-60ec-48de-87f6-d676877278c6-config-data\") pod \"keystone-bootstrap-24mhh\" (UID: \"0d6f159a-60ec-48de-87f6-d676877278c6\") " pod="openstack/keystone-bootstrap-24mhh" Oct 01 15:19:59 crc kubenswrapper[4869]: I1001 15:19:59.817835 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/0d6f159a-60ec-48de-87f6-d676877278c6-credential-keys\") pod \"keystone-bootstrap-24mhh\" (UID: \"0d6f159a-60ec-48de-87f6-d676877278c6\") " pod="openstack/keystone-bootstrap-24mhh" Oct 01 15:19:59 crc kubenswrapper[4869]: I1001 15:19:59.817857 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0d6f159a-60ec-48de-87f6-d676877278c6-scripts\") pod \"keystone-bootstrap-24mhh\" (UID: \"0d6f159a-60ec-48de-87f6-d676877278c6\") " pod="openstack/keystone-bootstrap-24mhh" Oct 01 15:19:59 crc kubenswrapper[4869]: I1001 15:19:59.822327 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/0d6f159a-60ec-48de-87f6-d676877278c6-fernet-keys\") pod \"keystone-bootstrap-24mhh\" (UID: \"0d6f159a-60ec-48de-87f6-d676877278c6\") " pod="openstack/keystone-bootstrap-24mhh" Oct 01 15:19:59 crc kubenswrapper[4869]: I1001 15:19:59.822713 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0d6f159a-60ec-48de-87f6-d676877278c6-scripts\") pod \"keystone-bootstrap-24mhh\" (UID: \"0d6f159a-60ec-48de-87f6-d676877278c6\") " pod="openstack/keystone-bootstrap-24mhh" Oct 01 15:19:59 crc kubenswrapper[4869]: I1001 15:19:59.823373 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d6f159a-60ec-48de-87f6-d676877278c6-combined-ca-bundle\") pod \"keystone-bootstrap-24mhh\" (UID: \"0d6f159a-60ec-48de-87f6-d676877278c6\") " pod="openstack/keystone-bootstrap-24mhh" Oct 01 15:19:59 crc kubenswrapper[4869]: I1001 15:19:59.823382 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/0d6f159a-60ec-48de-87f6-d676877278c6-credential-keys\") pod \"keystone-bootstrap-24mhh\" (UID: \"0d6f159a-60ec-48de-87f6-d676877278c6\") " pod="openstack/keystone-bootstrap-24mhh" Oct 01 15:19:59 crc kubenswrapper[4869]: I1001 15:19:59.824682 4869 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d6f159a-60ec-48de-87f6-d676877278c6-config-data\") pod \"keystone-bootstrap-24mhh\" (UID: \"0d6f159a-60ec-48de-87f6-d676877278c6\") " pod="openstack/keystone-bootstrap-24mhh" Oct 01 15:19:59 crc kubenswrapper[4869]: I1001 15:19:59.832789 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hp6vj\" (UniqueName: \"kubernetes.io/projected/0d6f159a-60ec-48de-87f6-d676877278c6-kube-api-access-hp6vj\") pod \"keystone-bootstrap-24mhh\" (UID: \"0d6f159a-60ec-48de-87f6-d676877278c6\") " pod="openstack/keystone-bootstrap-24mhh" Oct 01 15:19:59 crc kubenswrapper[4869]: I1001 15:19:59.951349 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-24mhh" Oct 01 15:20:03 crc kubenswrapper[4869]: I1001 15:20:03.893966 4869 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-86ddb7fb65-5th7v" podUID="298c162a-4d72-4d7c-bd59-d5d5c8f7cf7f" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.113:5353: connect: connection refused" Oct 01 15:20:04 crc kubenswrapper[4869]: I1001 15:20:04.289593 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-jmffx"] Oct 01 15:20:04 crc kubenswrapper[4869]: I1001 15:20:04.291272 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-jmffx" Oct 01 15:20:04 crc kubenswrapper[4869]: I1001 15:20:04.293810 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-8f4h6" Oct 01 15:20:04 crc kubenswrapper[4869]: I1001 15:20:04.299060 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-jmffx"] Oct 01 15:20:04 crc kubenswrapper[4869]: I1001 15:20:04.299729 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Oct 01 15:20:04 crc kubenswrapper[4869]: I1001 15:20:04.400551 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3dfd59b2-2698-41f5-95b7-f3c765173302-combined-ca-bundle\") pod \"barbican-db-sync-jmffx\" (UID: \"3dfd59b2-2698-41f5-95b7-f3c765173302\") " pod="openstack/barbican-db-sync-jmffx" Oct 01 15:20:04 crc kubenswrapper[4869]: I1001 15:20:04.400663 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/3dfd59b2-2698-41f5-95b7-f3c765173302-db-sync-config-data\") pod \"barbican-db-sync-jmffx\" (UID: \"3dfd59b2-2698-41f5-95b7-f3c765173302\") " pod="openstack/barbican-db-sync-jmffx" Oct 01 15:20:04 crc kubenswrapper[4869]: I1001 15:20:04.400759 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nl28q\" (UniqueName: \"kubernetes.io/projected/3dfd59b2-2698-41f5-95b7-f3c765173302-kube-api-access-nl28q\") pod \"barbican-db-sync-jmffx\" (UID: \"3dfd59b2-2698-41f5-95b7-f3c765173302\") " pod="openstack/barbican-db-sync-jmffx" Oct 01 15:20:04 crc kubenswrapper[4869]: I1001 15:20:04.431109 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-9fzlp"] Oct 01 15:20:04 crc kubenswrapper[4869]: I1001 15:20:04.432903 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-9fzlp" Oct 01 15:20:04 crc kubenswrapper[4869]: I1001 15:20:04.436023 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-dgxt5" Oct 01 15:20:04 crc kubenswrapper[4869]: I1001 15:20:04.436319 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Oct 01 15:20:04 crc kubenswrapper[4869]: I1001 15:20:04.436489 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Oct 01 15:20:04 crc kubenswrapper[4869]: I1001 15:20:04.443603 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-9fzlp"] Oct 01 15:20:04 crc kubenswrapper[4869]: I1001 15:20:04.502427 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nl28q\" (UniqueName: \"kubernetes.io/projected/3dfd59b2-2698-41f5-95b7-f3c765173302-kube-api-access-nl28q\") pod \"barbican-db-sync-jmffx\" (UID: \"3dfd59b2-2698-41f5-95b7-f3c765173302\") " pod="openstack/barbican-db-sync-jmffx" Oct 01 15:20:04 crc kubenswrapper[4869]: I1001 15:20:04.503017 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3dfd59b2-2698-41f5-95b7-f3c765173302-combined-ca-bundle\") pod \"barbican-db-sync-jmffx\" (UID: \"3dfd59b2-2698-41f5-95b7-f3c765173302\") " pod="openstack/barbican-db-sync-jmffx" Oct 01 15:20:04 crc kubenswrapper[4869]: I1001 15:20:04.503276 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/560e7cea-ec57-4f2c-b0d3-9ee5ded6f37a-combined-ca-bundle\") pod \"neutron-db-sync-9fzlp\" (UID: \"560e7cea-ec57-4f2c-b0d3-9ee5ded6f37a\") " pod="openstack/neutron-db-sync-9fzlp" Oct 01 15:20:04 crc kubenswrapper[4869]: I1001 15:20:04.503385 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gv4l7\" (UniqueName: \"kubernetes.io/projected/560e7cea-ec57-4f2c-b0d3-9ee5ded6f37a-kube-api-access-gv4l7\") pod \"neutron-db-sync-9fzlp\" (UID: \"560e7cea-ec57-4f2c-b0d3-9ee5ded6f37a\") " pod="openstack/neutron-db-sync-9fzlp" Oct 01 15:20:04 crc kubenswrapper[4869]: I1001 15:20:04.503461 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/3dfd59b2-2698-41f5-95b7-f3c765173302-db-sync-config-data\") pod \"barbican-db-sync-jmffx\" (UID: \"3dfd59b2-2698-41f5-95b7-f3c765173302\") " pod="openstack/barbican-db-sync-jmffx" Oct 01 15:20:04 crc kubenswrapper[4869]: I1001 15:20:04.503554 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/560e7cea-ec57-4f2c-b0d3-9ee5ded6f37a-config\") pod \"neutron-db-sync-9fzlp\" (UID: \"560e7cea-ec57-4f2c-b0d3-9ee5ded6f37a\") " pod="openstack/neutron-db-sync-9fzlp" Oct 01 15:20:04 crc kubenswrapper[4869]: I1001 15:20:04.515910 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3dfd59b2-2698-41f5-95b7-f3c765173302-combined-ca-bundle\") pod \"barbican-db-sync-jmffx\" (UID: \"3dfd59b2-2698-41f5-95b7-f3c765173302\") " pod="openstack/barbican-db-sync-jmffx" Oct 01 15:20:04 crc kubenswrapper[4869]: I1001 15:20:04.518205 4869 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/3dfd59b2-2698-41f5-95b7-f3c765173302-db-sync-config-data\") pod \"barbican-db-sync-jmffx\" (UID: \"3dfd59b2-2698-41f5-95b7-f3c765173302\") " pod="openstack/barbican-db-sync-jmffx" Oct 01 15:20:04 crc kubenswrapper[4869]: I1001 15:20:04.519967 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nl28q\" (UniqueName: \"kubernetes.io/projected/3dfd59b2-2698-41f5-95b7-f3c765173302-kube-api-access-nl28q\") pod \"barbican-db-sync-jmffx\" (UID: \"3dfd59b2-2698-41f5-95b7-f3c765173302\") " pod="openstack/barbican-db-sync-jmffx" Oct 01 15:20:04 crc kubenswrapper[4869]: I1001 15:20:04.605023 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/560e7cea-ec57-4f2c-b0d3-9ee5ded6f37a-config\") pod \"neutron-db-sync-9fzlp\" (UID: \"560e7cea-ec57-4f2c-b0d3-9ee5ded6f37a\") " pod="openstack/neutron-db-sync-9fzlp" Oct 01 15:20:04 crc kubenswrapper[4869]: I1001 15:20:04.605185 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/560e7cea-ec57-4f2c-b0d3-9ee5ded6f37a-combined-ca-bundle\") pod \"neutron-db-sync-9fzlp\" (UID: \"560e7cea-ec57-4f2c-b0d3-9ee5ded6f37a\") " pod="openstack/neutron-db-sync-9fzlp" Oct 01 15:20:04 crc kubenswrapper[4869]: I1001 15:20:04.605234 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gv4l7\" (UniqueName: \"kubernetes.io/projected/560e7cea-ec57-4f2c-b0d3-9ee5ded6f37a-kube-api-access-gv4l7\") pod \"neutron-db-sync-9fzlp\" (UID: \"560e7cea-ec57-4f2c-b0d3-9ee5ded6f37a\") " pod="openstack/neutron-db-sync-9fzlp" Oct 01 15:20:04 crc kubenswrapper[4869]: I1001 15:20:04.609865 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/560e7cea-ec57-4f2c-b0d3-9ee5ded6f37a-config\") pod \"neutron-db-sync-9fzlp\" (UID: \"560e7cea-ec57-4f2c-b0d3-9ee5ded6f37a\") " pod="openstack/neutron-db-sync-9fzlp" Oct 01 15:20:04 crc kubenswrapper[4869]: I1001 15:20:04.610050 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/560e7cea-ec57-4f2c-b0d3-9ee5ded6f37a-combined-ca-bundle\") pod \"neutron-db-sync-9fzlp\" (UID: \"560e7cea-ec57-4f2c-b0d3-9ee5ded6f37a\") " pod="openstack/neutron-db-sync-9fzlp" Oct 01 15:20:04 crc kubenswrapper[4869]: I1001 15:20:04.616763 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-jmffx" Oct 01 15:20:04 crc kubenswrapper[4869]: I1001 15:20:04.621407 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gv4l7\" (UniqueName: \"kubernetes.io/projected/560e7cea-ec57-4f2c-b0d3-9ee5ded6f37a-kube-api-access-gv4l7\") pod \"neutron-db-sync-9fzlp\" (UID: \"560e7cea-ec57-4f2c-b0d3-9ee5ded6f37a\") " pod="openstack/neutron-db-sync-9fzlp" Oct 01 15:20:04 crc kubenswrapper[4869]: I1001 15:20:04.754999 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-9fzlp" Oct 01 15:20:06 crc kubenswrapper[4869]: I1001 15:20:06.515334 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-695b54fb65-rp2zz" Oct 01 15:20:06 crc kubenswrapper[4869]: I1001 15:20:06.523489 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-56977fbfb5-99lb5" Oct 01 15:20:06 crc kubenswrapper[4869]: I1001 15:20:06.638889 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dgr8s\" (UniqueName: \"kubernetes.io/projected/abaaae88-543a-4735-a3e2-08978a450647-kube-api-access-dgr8s\") pod \"abaaae88-543a-4735-a3e2-08978a450647\" (UID: \"abaaae88-543a-4735-a3e2-08978a450647\") " Oct 01 15:20:06 crc kubenswrapper[4869]: I1001 15:20:06.638940 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zzw47\" (UniqueName: \"kubernetes.io/projected/84004c40-05d7-4037-b3c3-748ad141fcf7-kube-api-access-zzw47\") pod \"84004c40-05d7-4037-b3c3-748ad141fcf7\" (UID: \"84004c40-05d7-4037-b3c3-748ad141fcf7\") " Oct 01 15:20:06 crc kubenswrapper[4869]: I1001 15:20:06.638961 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/abaaae88-543a-4735-a3e2-08978a450647-scripts\") pod \"abaaae88-543a-4735-a3e2-08978a450647\" (UID: \"abaaae88-543a-4735-a3e2-08978a450647\") " Oct 01 15:20:06 crc kubenswrapper[4869]: I1001 15:20:06.639487 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/abaaae88-543a-4735-a3e2-08978a450647-scripts" (OuterVolumeSpecName: "scripts") pod "abaaae88-543a-4735-a3e2-08978a450647" (UID: "abaaae88-543a-4735-a3e2-08978a450647"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:20:06 crc kubenswrapper[4869]: I1001 15:20:06.639600 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/84004c40-05d7-4037-b3c3-748ad141fcf7-scripts" (OuterVolumeSpecName: "scripts") pod "84004c40-05d7-4037-b3c3-748ad141fcf7" (UID: "84004c40-05d7-4037-b3c3-748ad141fcf7"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:20:06 crc kubenswrapper[4869]: I1001 15:20:06.639002 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/84004c40-05d7-4037-b3c3-748ad141fcf7-scripts\") pod \"84004c40-05d7-4037-b3c3-748ad141fcf7\" (UID: \"84004c40-05d7-4037-b3c3-748ad141fcf7\") " Oct 01 15:20:06 crc kubenswrapper[4869]: I1001 15:20:06.639803 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/84004c40-05d7-4037-b3c3-748ad141fcf7-config-data\") pod \"84004c40-05d7-4037-b3c3-748ad141fcf7\" (UID: \"84004c40-05d7-4037-b3c3-748ad141fcf7\") " Oct 01 15:20:06 crc kubenswrapper[4869]: I1001 15:20:06.639923 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/abaaae88-543a-4735-a3e2-08978a450647-config-data\") pod \"abaaae88-543a-4735-a3e2-08978a450647\" (UID: \"abaaae88-543a-4735-a3e2-08978a450647\") " Oct 01 15:20:06 crc kubenswrapper[4869]: I1001 15:20:06.640421 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/abaaae88-543a-4735-a3e2-08978a450647-config-data" (OuterVolumeSpecName: "config-data") pod "abaaae88-543a-4735-a3e2-08978a450647" (UID: "abaaae88-543a-4735-a3e2-08978a450647"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:20:06 crc kubenswrapper[4869]: I1001 15:20:06.640471 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/abaaae88-543a-4735-a3e2-08978a450647-logs\") pod \"abaaae88-543a-4735-a3e2-08978a450647\" (UID: \"abaaae88-543a-4735-a3e2-08978a450647\") " Oct 01 15:20:06 crc kubenswrapper[4869]: I1001 15:20:06.640501 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/84004c40-05d7-4037-b3c3-748ad141fcf7-config-data" (OuterVolumeSpecName: "config-data") pod "84004c40-05d7-4037-b3c3-748ad141fcf7" (UID: "84004c40-05d7-4037-b3c3-748ad141fcf7"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:20:06 crc kubenswrapper[4869]: I1001 15:20:06.640516 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/84004c40-05d7-4037-b3c3-748ad141fcf7-horizon-secret-key\") pod \"84004c40-05d7-4037-b3c3-748ad141fcf7\" (UID: \"84004c40-05d7-4037-b3c3-748ad141fcf7\") " Oct 01 15:20:06 crc kubenswrapper[4869]: I1001 15:20:06.640642 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/84004c40-05d7-4037-b3c3-748ad141fcf7-logs\") pod \"84004c40-05d7-4037-b3c3-748ad141fcf7\" (UID: \"84004c40-05d7-4037-b3c3-748ad141fcf7\") " Oct 01 15:20:06 crc kubenswrapper[4869]: I1001 15:20:06.640689 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/abaaae88-543a-4735-a3e2-08978a450647-horizon-secret-key\") pod \"abaaae88-543a-4735-a3e2-08978a450647\" (UID: \"abaaae88-543a-4735-a3e2-08978a450647\") " Oct 01 15:20:06 crc kubenswrapper[4869]: I1001 15:20:06.640957 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/84004c40-05d7-4037-b3c3-748ad141fcf7-logs" (OuterVolumeSpecName: "logs") pod "84004c40-05d7-4037-b3c3-748ad141fcf7" (UID: "84004c40-05d7-4037-b3c3-748ad141fcf7"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 15:20:06 crc kubenswrapper[4869]: I1001 15:20:06.641269 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/abaaae88-543a-4735-a3e2-08978a450647-logs" (OuterVolumeSpecName: "logs") pod "abaaae88-543a-4735-a3e2-08978a450647" (UID: "abaaae88-543a-4735-a3e2-08978a450647"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 15:20:06 crc kubenswrapper[4869]: I1001 15:20:06.641925 4869 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/84004c40-05d7-4037-b3c3-748ad141fcf7-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 15:20:06 crc kubenswrapper[4869]: I1001 15:20:06.641953 4869 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/abaaae88-543a-4735-a3e2-08978a450647-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 15:20:06 crc kubenswrapper[4869]: I1001 15:20:06.641966 4869 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/abaaae88-543a-4735-a3e2-08978a450647-logs\") on node \"crc\" DevicePath \"\"" Oct 01 15:20:06 crc kubenswrapper[4869]: I1001 15:20:06.641977 4869 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/84004c40-05d7-4037-b3c3-748ad141fcf7-logs\") on node \"crc\" DevicePath \"\"" Oct 01 15:20:06 crc kubenswrapper[4869]: I1001 15:20:06.641990 4869 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/abaaae88-543a-4735-a3e2-08978a450647-scripts\") on node \"crc\" DevicePath \"\"" Oct 01 15:20:06 crc kubenswrapper[4869]: I1001 15:20:06.642003 4869 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/84004c40-05d7-4037-b3c3-748ad141fcf7-scripts\") on node \"crc\" DevicePath \"\"" Oct 01 15:20:06 crc kubenswrapper[4869]: I1001 15:20:06.642558 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/84004c40-05d7-4037-b3c3-748ad141fcf7-kube-api-access-zzw47" (OuterVolumeSpecName: "kube-api-access-zzw47") pod "84004c40-05d7-4037-b3c3-748ad141fcf7" (UID: "84004c40-05d7-4037-b3c3-748ad141fcf7"). InnerVolumeSpecName "kube-api-access-zzw47". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:20:06 crc kubenswrapper[4869]: I1001 15:20:06.642936 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/84004c40-05d7-4037-b3c3-748ad141fcf7-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "84004c40-05d7-4037-b3c3-748ad141fcf7" (UID: "84004c40-05d7-4037-b3c3-748ad141fcf7"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:20:06 crc kubenswrapper[4869]: I1001 15:20:06.644381 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/abaaae88-543a-4735-a3e2-08978a450647-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "abaaae88-543a-4735-a3e2-08978a450647" (UID: "abaaae88-543a-4735-a3e2-08978a450647"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:20:06 crc kubenswrapper[4869]: I1001 15:20:06.645349 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/abaaae88-543a-4735-a3e2-08978a450647-kube-api-access-dgr8s" (OuterVolumeSpecName: "kube-api-access-dgr8s") pod "abaaae88-543a-4735-a3e2-08978a450647" (UID: "abaaae88-543a-4735-a3e2-08978a450647"). InnerVolumeSpecName "kube-api-access-dgr8s". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:20:06 crc kubenswrapper[4869]: I1001 15:20:06.741322 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-56977fbfb5-99lb5" event={"ID":"84004c40-05d7-4037-b3c3-748ad141fcf7","Type":"ContainerDied","Data":"8406b2566c3b001aff4f6df899aedc6ce4327835d618d4200f75c50d6cc56166"} Oct 01 15:20:06 crc kubenswrapper[4869]: I1001 15:20:06.741396 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-56977fbfb5-99lb5" Oct 01 15:20:06 crc kubenswrapper[4869]: I1001 15:20:06.748401 4869 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/abaaae88-543a-4735-a3e2-08978a450647-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Oct 01 15:20:06 crc kubenswrapper[4869]: I1001 15:20:06.748419 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dgr8s\" (UniqueName: \"kubernetes.io/projected/abaaae88-543a-4735-a3e2-08978a450647-kube-api-access-dgr8s\") on node \"crc\" DevicePath \"\"" Oct 01 15:20:06 crc kubenswrapper[4869]: I1001 15:20:06.748430 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zzw47\" (UniqueName: \"kubernetes.io/projected/84004c40-05d7-4037-b3c3-748ad141fcf7-kube-api-access-zzw47\") on node \"crc\" DevicePath \"\"" Oct 01 15:20:06 crc kubenswrapper[4869]: I1001 15:20:06.748439 4869 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/84004c40-05d7-4037-b3c3-748ad141fcf7-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Oct 01 15:20:06 crc kubenswrapper[4869]: I1001 15:20:06.748859 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-695b54fb65-rp2zz" event={"ID":"abaaae88-543a-4735-a3e2-08978a450647","Type":"ContainerDied","Data":"eb3783afbda1d725ad8856081e5f2a6d95536f52c4c5d12d96f5146b7f8f78c4"} Oct 01 15:20:06 crc kubenswrapper[4869]: I1001 15:20:06.749762 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-695b54fb65-rp2zz" Oct 01 15:20:06 crc kubenswrapper[4869]: I1001 15:20:06.811389 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-56977fbfb5-99lb5"] Oct 01 15:20:06 crc kubenswrapper[4869]: I1001 15:20:06.817143 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-56977fbfb5-99lb5"] Oct 01 15:20:06 crc kubenswrapper[4869]: I1001 15:20:06.836753 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-695b54fb65-rp2zz"] Oct 01 15:20:06 crc kubenswrapper[4869]: I1001 15:20:06.848711 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-695b54fb65-rp2zz"] Oct 01 15:20:07 crc kubenswrapper[4869]: I1001 15:20:07.598478 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="84004c40-05d7-4037-b3c3-748ad141fcf7" path="/var/lib/kubelet/pods/84004c40-05d7-4037-b3c3-748ad141fcf7/volumes" Oct 01 15:20:07 crc kubenswrapper[4869]: I1001 15:20:07.599092 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="abaaae88-543a-4735-a3e2-08978a450647" path="/var/lib/kubelet/pods/abaaae88-543a-4735-a3e2-08978a450647/volumes" Oct 01 15:20:07 crc kubenswrapper[4869]: E1001 15:20:07.706067 4869 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-cinder-api@sha256:e318869f706836a0c74c0ad55aab277b1bb7fae0555ae0f03cb28b379b9ce695" Oct 01 15:20:07 crc kubenswrapper[4869]: E1001 15:20:07.706597 4869 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cinder-db-sync,Image:quay.io/podified-antelope-centos9/openstack-cinder-api@sha256:e318869f706836a0c74c0ad55aab277b1bb7fae0555ae0f03cb28b379b9ce695,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && 
/usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-machine-id,ReadOnly:true,MountPath:/etc/machine-id,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/merged,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/cinder/cinder.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-2r52b,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-db-sync-slhz2_openstack(f9c5d763-90ad-4611-8cac-193343af1b78): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 01 15:20:07 crc kubenswrapper[4869]: E1001 15:20:07.707889 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cinder-db-sync-slhz2" podUID="f9c5d763-90ad-4611-8cac-193343af1b78" Oct 01 15:20:07 crc kubenswrapper[4869]: I1001 15:20:07.767696 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86ddb7fb65-5th7v" event={"ID":"298c162a-4d72-4d7c-bd59-d5d5c8f7cf7f","Type":"ContainerDied","Data":"19f7ed2cee0dadca13cdf8d3341006d2c83f2ca0b62746c72ac27d9853441a7d"} Oct 01 15:20:07 crc kubenswrapper[4869]: I1001 15:20:07.767779 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="19f7ed2cee0dadca13cdf8d3341006d2c83f2ca0b62746c72ac27d9853441a7d" Oct 01 15:20:07 crc kubenswrapper[4869]: E1001 15:20:07.807310 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ImagePullBackOff: \"Back-off pulling image 
\\\"quay.io/podified-antelope-centos9/openstack-cinder-api@sha256:e318869f706836a0c74c0ad55aab277b1bb7fae0555ae0f03cb28b379b9ce695\\\"\"" pod="openstack/cinder-db-sync-slhz2" podUID="f9c5d763-90ad-4611-8cac-193343af1b78" Oct 01 15:20:07 crc kubenswrapper[4869]: I1001 15:20:07.973118 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86ddb7fb65-5th7v" Oct 01 15:20:08 crc kubenswrapper[4869]: I1001 15:20:08.067826 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/298c162a-4d72-4d7c-bd59-d5d5c8f7cf7f-ovsdbserver-sb\") pod \"298c162a-4d72-4d7c-bd59-d5d5c8f7cf7f\" (UID: \"298c162a-4d72-4d7c-bd59-d5d5c8f7cf7f\") " Oct 01 15:20:08 crc kubenswrapper[4869]: I1001 15:20:08.067861 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/298c162a-4d72-4d7c-bd59-d5d5c8f7cf7f-dns-svc\") pod \"298c162a-4d72-4d7c-bd59-d5d5c8f7cf7f\" (UID: \"298c162a-4d72-4d7c-bd59-d5d5c8f7cf7f\") " Oct 01 15:20:08 crc kubenswrapper[4869]: I1001 15:20:08.067937 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4btt7\" (UniqueName: \"kubernetes.io/projected/298c162a-4d72-4d7c-bd59-d5d5c8f7cf7f-kube-api-access-4btt7\") pod \"298c162a-4d72-4d7c-bd59-d5d5c8f7cf7f\" (UID: \"298c162a-4d72-4d7c-bd59-d5d5c8f7cf7f\") " Oct 01 15:20:08 crc kubenswrapper[4869]: I1001 15:20:08.068042 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/298c162a-4d72-4d7c-bd59-d5d5c8f7cf7f-ovsdbserver-nb\") pod \"298c162a-4d72-4d7c-bd59-d5d5c8f7cf7f\" (UID: \"298c162a-4d72-4d7c-bd59-d5d5c8f7cf7f\") " Oct 01 15:20:08 crc kubenswrapper[4869]: I1001 15:20:08.068082 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/298c162a-4d72-4d7c-bd59-d5d5c8f7cf7f-config\") pod \"298c162a-4d72-4d7c-bd59-d5d5c8f7cf7f\" (UID: \"298c162a-4d72-4d7c-bd59-d5d5c8f7cf7f\") " Oct 01 15:20:08 crc kubenswrapper[4869]: I1001 15:20:08.073300 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/298c162a-4d72-4d7c-bd59-d5d5c8f7cf7f-kube-api-access-4btt7" (OuterVolumeSpecName: "kube-api-access-4btt7") pod "298c162a-4d72-4d7c-bd59-d5d5c8f7cf7f" (UID: "298c162a-4d72-4d7c-bd59-d5d5c8f7cf7f"). InnerVolumeSpecName "kube-api-access-4btt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:20:08 crc kubenswrapper[4869]: I1001 15:20:08.112161 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/298c162a-4d72-4d7c-bd59-d5d5c8f7cf7f-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "298c162a-4d72-4d7c-bd59-d5d5c8f7cf7f" (UID: "298c162a-4d72-4d7c-bd59-d5d5c8f7cf7f"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:20:08 crc kubenswrapper[4869]: I1001 15:20:08.112858 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/298c162a-4d72-4d7c-bd59-d5d5c8f7cf7f-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "298c162a-4d72-4d7c-bd59-d5d5c8f7cf7f" (UID: "298c162a-4d72-4d7c-bd59-d5d5c8f7cf7f"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:20:08 crc kubenswrapper[4869]: I1001 15:20:08.114300 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/298c162a-4d72-4d7c-bd59-d5d5c8f7cf7f-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "298c162a-4d72-4d7c-bd59-d5d5c8f7cf7f" (UID: "298c162a-4d72-4d7c-bd59-d5d5c8f7cf7f"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:20:08 crc kubenswrapper[4869]: I1001 15:20:08.115210 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/298c162a-4d72-4d7c-bd59-d5d5c8f7cf7f-config" (OuterVolumeSpecName: "config") pod "298c162a-4d72-4d7c-bd59-d5d5c8f7cf7f" (UID: "298c162a-4d72-4d7c-bd59-d5d5c8f7cf7f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:20:08 crc kubenswrapper[4869]: I1001 15:20:08.170212 4869 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/298c162a-4d72-4d7c-bd59-d5d5c8f7cf7f-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 01 15:20:08 crc kubenswrapper[4869]: I1001 15:20:08.170245 4869 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/298c162a-4d72-4d7c-bd59-d5d5c8f7cf7f-config\") on node \"crc\" DevicePath \"\"" Oct 01 15:20:08 crc kubenswrapper[4869]: I1001 15:20:08.170272 4869 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/298c162a-4d72-4d7c-bd59-d5d5c8f7cf7f-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 01 15:20:08 crc kubenswrapper[4869]: I1001 15:20:08.170281 4869 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/298c162a-4d72-4d7c-bd59-d5d5c8f7cf7f-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 01 15:20:08 crc kubenswrapper[4869]: I1001 15:20:08.170292 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4btt7\" (UniqueName: \"kubernetes.io/projected/298c162a-4d72-4d7c-bd59-d5d5c8f7cf7f-kube-api-access-4btt7\") on node \"crc\" DevicePath \"\"" Oct 01 15:20:08 crc kubenswrapper[4869]: I1001 15:20:08.198852 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-24mhh"] Oct 01 15:20:08 crc kubenswrapper[4869]: W1001 15:20:08.204001 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0d6f159a_60ec_48de_87f6_d676877278c6.slice/crio-4fab351c43e2f865c25839bdda4a4ef5d4d242a0c474c8f6a3de99835fdd93f9 WatchSource:0}: Error finding container 4fab351c43e2f865c25839bdda4a4ef5d4d242a0c474c8f6a3de99835fdd93f9: Status 404 returned error can't find the container with id 4fab351c43e2f865c25839bdda4a4ef5d4d242a0c474c8f6a3de99835fdd93f9 Oct 01 15:20:08 crc kubenswrapper[4869]: I1001 15:20:08.324735 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-jmffx"] Oct 01 15:20:08 crc kubenswrapper[4869]: I1001 15:20:08.332758 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-5766b74c9d-wpxpf"] Oct 01 15:20:08 crc kubenswrapper[4869]: I1001 15:20:08.426029 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-9fzlp"] Oct 01 15:20:08 crc kubenswrapper[4869]: I1001 15:20:08.439798 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack/horizon-5f66f6967d-mnbqz"] Oct 01 15:20:08 crc kubenswrapper[4869]: I1001 15:20:08.451054 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-85845f7997-n9h7g"] Oct 01 15:20:08 crc kubenswrapper[4869]: I1001 15:20:08.776115 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-24mhh" event={"ID":"0d6f159a-60ec-48de-87f6-d676877278c6","Type":"ContainerStarted","Data":"7d6df449e6cafe4a57fc8ee53ec67420f8668a6c47309ba282568772b8dfffda"} Oct 01 15:20:08 crc kubenswrapper[4869]: I1001 15:20:08.776451 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-24mhh" event={"ID":"0d6f159a-60ec-48de-87f6-d676877278c6","Type":"ContainerStarted","Data":"4fab351c43e2f865c25839bdda4a4ef5d4d242a0c474c8f6a3de99835fdd93f9"} Oct 01 15:20:08 crc kubenswrapper[4869]: I1001 15:20:08.778006 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-25k96" event={"ID":"546e76f6-f453-481b-8115-369d6ff9326c","Type":"ContainerStarted","Data":"5f3d74c9f688f4adb962a4cb2f7313ad3a9a7845f406092acd4f11e626a4d230"} Oct 01 15:20:08 crc kubenswrapper[4869]: I1001 15:20:08.779326 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-85845f7997-n9h7g" event={"ID":"b141c51a-44cd-4c2c-be11-6c8b5576a289","Type":"ContainerStarted","Data":"f0e61d29e31f231496b06bc3f60f48eec18267c36c477af60090db3acc47a4dc"} Oct 01 15:20:08 crc kubenswrapper[4869]: I1001 15:20:08.780821 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-9fzlp" event={"ID":"560e7cea-ec57-4f2c-b0d3-9ee5ded6f37a","Type":"ContainerStarted","Data":"346ca77027203d6eadf53d5f1a1d64a58f3c7f0e500aa8c8d4d64c71e76ee52b"} Oct 01 15:20:08 crc kubenswrapper[4869]: I1001 15:20:08.780850 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-9fzlp" event={"ID":"560e7cea-ec57-4f2c-b0d3-9ee5ded6f37a","Type":"ContainerStarted","Data":"ae665cc1791dd6c7102f5680d16b3a30021f40d60c9f7e20585f04fddd336163"} Oct 01 15:20:08 crc kubenswrapper[4869]: I1001 15:20:08.782020 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-jmffx" event={"ID":"3dfd59b2-2698-41f5-95b7-f3c765173302","Type":"ContainerStarted","Data":"8a24ac6578c19fbc54e74de74857994841f50cd9441036315d960546c2644b3e"} Oct 01 15:20:08 crc kubenswrapper[4869]: I1001 15:20:08.782965 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5f66f6967d-mnbqz" event={"ID":"eb62e045-ca51-4b33-a63d-9c53b247cc91","Type":"ContainerStarted","Data":"5d0635a64b969c638aa7d2e15b553aa5c63a743d829774cfe61e5038c48e982b"} Oct 01 15:20:08 crc kubenswrapper[4869]: I1001 15:20:08.784351 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631","Type":"ContainerStarted","Data":"e908a2914b310dca9c1c19937855f20362a2902fd309d74542f25cd91fef47d9"} Oct 01 15:20:08 crc kubenswrapper[4869]: I1001 15:20:08.787241 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5766b74c9d-wpxpf" event={"ID":"011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf","Type":"ContainerStarted","Data":"c1f6b4084c9e3c40d69d22c80de2e3b38bb0088828476cf3ff96e1c75d405d0e"} Oct 01 15:20:08 crc kubenswrapper[4869]: I1001 15:20:08.787291 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-86ddb7fb65-5th7v" Oct 01 15:20:08 crc kubenswrapper[4869]: I1001 15:20:08.792751 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-24mhh" podStartSLOduration=9.792729493 podStartE2EDuration="9.792729493s" podCreationTimestamp="2025-10-01 15:19:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:20:08.789334428 +0000 UTC m=+917.936177574" watchObservedRunningTime="2025-10-01 15:20:08.792729493 +0000 UTC m=+917.939572649" Oct 01 15:20:08 crc kubenswrapper[4869]: I1001 15:20:08.820012 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-9fzlp" podStartSLOduration=4.819985982 podStartE2EDuration="4.819985982s" podCreationTimestamp="2025-10-01 15:20:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:20:08.806984553 +0000 UTC m=+917.953827669" watchObservedRunningTime="2025-10-01 15:20:08.819985982 +0000 UTC m=+917.966829138" Oct 01 15:20:08 crc kubenswrapper[4869]: I1001 15:20:08.825876 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-25k96" podStartSLOduration=3.161673353 podStartE2EDuration="31.82585718s" podCreationTimestamp="2025-10-01 15:19:37 +0000 UTC" firstStartedPulling="2025-10-01 15:19:38.981173957 +0000 UTC m=+888.128017073" lastFinishedPulling="2025-10-01 15:20:07.645357744 +0000 UTC m=+916.792200900" observedRunningTime="2025-10-01 15:20:08.82069458 +0000 UTC m=+917.967537736" watchObservedRunningTime="2025-10-01 15:20:08.82585718 +0000 UTC m=+917.972700336" Oct 01 15:20:08 crc kubenswrapper[4869]: I1001 15:20:08.845649 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-86ddb7fb65-5th7v"] Oct 01 15:20:08 crc kubenswrapper[4869]: I1001 15:20:08.857717 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-86ddb7fb65-5th7v"] Oct 01 15:20:09 crc kubenswrapper[4869]: I1001 15:20:09.594105 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="298c162a-4d72-4d7c-bd59-d5d5c8f7cf7f" path="/var/lib/kubelet/pods/298c162a-4d72-4d7c-bd59-d5d5c8f7cf7f/volumes" Oct 01 15:20:09 crc kubenswrapper[4869]: I1001 15:20:09.797894 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5f66f6967d-mnbqz" event={"ID":"eb62e045-ca51-4b33-a63d-9c53b247cc91","Type":"ContainerStarted","Data":"d0b2518856db669d3aa4483066b2c2c9bb2f272c6d373e445f4018fb6e02aba7"} Oct 01 15:20:09 crc kubenswrapper[4869]: I1001 15:20:09.797933 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5f66f6967d-mnbqz" event={"ID":"eb62e045-ca51-4b33-a63d-9c53b247cc91","Type":"ContainerStarted","Data":"413534386b60ce41b80e735c5411a6700b85a3c9d8729e26b2d57b095f75cc4b"} Oct 01 15:20:09 crc kubenswrapper[4869]: I1001 15:20:09.800781 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5766b74c9d-wpxpf" event={"ID":"011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf","Type":"ContainerStarted","Data":"26d24bc7aa47d955a5fcbe67784e963e6b5f386b86c229c48e9a56720466c88e"} Oct 01 15:20:09 crc kubenswrapper[4869]: I1001 15:20:09.800817 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5766b74c9d-wpxpf" 
event={"ID":"011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf","Type":"ContainerStarted","Data":"848fa104b788b0a643bef9f8bc271d7b678ee782e58d1385dc06d84691994654"} Oct 01 15:20:09 crc kubenswrapper[4869]: I1001 15:20:09.803741 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-85845f7997-n9h7g" event={"ID":"b141c51a-44cd-4c2c-be11-6c8b5576a289","Type":"ContainerStarted","Data":"03a3bc893c21f1cd1e12eb54fbe6056ca2c18b130dbae8c65059fb06dbe819b4"} Oct 01 15:20:09 crc kubenswrapper[4869]: I1001 15:20:09.803787 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-85845f7997-n9h7g" event={"ID":"b141c51a-44cd-4c2c-be11-6c8b5576a289","Type":"ContainerStarted","Data":"5337d199ab90cec38c8fb6bb0815ad515f9dc13f7c13dc9fc845f1f713e29768"} Oct 01 15:20:09 crc kubenswrapper[4869]: I1001 15:20:09.803847 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-85845f7997-n9h7g" podUID="b141c51a-44cd-4c2c-be11-6c8b5576a289" containerName="horizon-log" containerID="cri-o://5337d199ab90cec38c8fb6bb0815ad515f9dc13f7c13dc9fc845f1f713e29768" gracePeriod=30 Oct 01 15:20:09 crc kubenswrapper[4869]: I1001 15:20:09.803872 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-85845f7997-n9h7g" podUID="b141c51a-44cd-4c2c-be11-6c8b5576a289" containerName="horizon" containerID="cri-o://03a3bc893c21f1cd1e12eb54fbe6056ca2c18b130dbae8c65059fb06dbe819b4" gracePeriod=30 Oct 01 15:20:09 crc kubenswrapper[4869]: I1001 15:20:09.831848 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-5f66f6967d-mnbqz" podStartSLOduration=21.381879843 podStartE2EDuration="21.831821447s" podCreationTimestamp="2025-10-01 15:19:48 +0000 UTC" firstStartedPulling="2025-10-01 15:20:08.452247834 +0000 UTC m=+917.599090950" lastFinishedPulling="2025-10-01 15:20:08.902189438 +0000 UTC m=+918.049032554" observedRunningTime="2025-10-01 15:20:09.820661335 +0000 UTC m=+918.967504461" watchObservedRunningTime="2025-10-01 15:20:09.831821447 +0000 UTC m=+918.978664563" Oct 01 15:20:09 crc kubenswrapper[4869]: I1001 15:20:09.865420 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-85845f7997-n9h7g" podStartSLOduration=27.467157197 podStartE2EDuration="27.865394465s" podCreationTimestamp="2025-10-01 15:19:42 +0000 UTC" firstStartedPulling="2025-10-01 15:20:08.462190735 +0000 UTC m=+917.609033851" lastFinishedPulling="2025-10-01 15:20:08.860427993 +0000 UTC m=+918.007271119" observedRunningTime="2025-10-01 15:20:09.836491045 +0000 UTC m=+918.983334201" watchObservedRunningTime="2025-10-01 15:20:09.865394465 +0000 UTC m=+919.012237591" Oct 01 15:20:09 crc kubenswrapper[4869]: I1001 15:20:09.872625 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-5766b74c9d-wpxpf" podStartSLOduration=21.239832845 podStartE2EDuration="21.872604827s" podCreationTimestamp="2025-10-01 15:19:48 +0000 UTC" firstStartedPulling="2025-10-01 15:20:08.335532486 +0000 UTC m=+917.482375602" lastFinishedPulling="2025-10-01 15:20:08.968304458 +0000 UTC m=+918.115147584" observedRunningTime="2025-10-01 15:20:09.855096135 +0000 UTC m=+919.001939261" watchObservedRunningTime="2025-10-01 15:20:09.872604827 +0000 UTC m=+919.019447953" Oct 01 15:20:12 crc kubenswrapper[4869]: I1001 15:20:12.601997 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-85845f7997-n9h7g" Oct 01 15:20:12 crc kubenswrapper[4869]: 
I1001 15:20:12.829569 4869 generic.go:334] "Generic (PLEG): container finished" podID="0d6f159a-60ec-48de-87f6-d676877278c6" containerID="7d6df449e6cafe4a57fc8ee53ec67420f8668a6c47309ba282568772b8dfffda" exitCode=0 Oct 01 15:20:12 crc kubenswrapper[4869]: I1001 15:20:12.829639 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-24mhh" event={"ID":"0d6f159a-60ec-48de-87f6-d676877278c6","Type":"ContainerDied","Data":"7d6df449e6cafe4a57fc8ee53ec67420f8668a6c47309ba282568772b8dfffda"} Oct 01 15:20:12 crc kubenswrapper[4869]: I1001 15:20:12.832322 4869 generic.go:334] "Generic (PLEG): container finished" podID="546e76f6-f453-481b-8115-369d6ff9326c" containerID="5f3d74c9f688f4adb962a4cb2f7313ad3a9a7845f406092acd4f11e626a4d230" exitCode=0 Oct 01 15:20:12 crc kubenswrapper[4869]: I1001 15:20:12.832378 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-25k96" event={"ID":"546e76f6-f453-481b-8115-369d6ff9326c","Type":"ContainerDied","Data":"5f3d74c9f688f4adb962a4cb2f7313ad3a9a7845f406092acd4f11e626a4d230"} Oct 01 15:20:13 crc kubenswrapper[4869]: I1001 15:20:13.354253 4869 patch_prober.go:28] interesting pod/machine-config-daemon-c86m8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 15:20:13 crc kubenswrapper[4869]: I1001 15:20:13.354381 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 15:20:13 crc kubenswrapper[4869]: I1001 15:20:13.354430 4869 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" Oct 01 15:20:13 crc kubenswrapper[4869]: I1001 15:20:13.355539 4869 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"4be1a24ad49d8ad1b9a1395c62f6541610f0ea2bbd6f24d661f794435b423dc8"} pod="openshift-machine-config-operator/machine-config-daemon-c86m8" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 01 15:20:13 crc kubenswrapper[4869]: I1001 15:20:13.355634 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" containerID="cri-o://4be1a24ad49d8ad1b9a1395c62f6541610f0ea2bbd6f24d661f794435b423dc8" gracePeriod=600 Oct 01 15:20:13 crc kubenswrapper[4869]: I1001 15:20:13.843716 4869 generic.go:334] "Generic (PLEG): container finished" podID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerID="4be1a24ad49d8ad1b9a1395c62f6541610f0ea2bbd6f24d661f794435b423dc8" exitCode=0 Oct 01 15:20:13 crc kubenswrapper[4869]: I1001 15:20:13.843942 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" event={"ID":"a4b64f7f-0b03-4f47-965b-9fde048b735c","Type":"ContainerDied","Data":"4be1a24ad49d8ad1b9a1395c62f6541610f0ea2bbd6f24d661f794435b423dc8"} Oct 01 15:20:13 crc kubenswrapper[4869]: I1001 15:20:13.844386 4869 scope.go:117] 
"RemoveContainer" containerID="aedd256e8a7e9adcb3428c1dfb846efe5c6adc26f622dd82be7a88d857fb712b" Oct 01 15:20:14 crc kubenswrapper[4869]: I1001 15:20:14.128531 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-25k96" Oct 01 15:20:14 crc kubenswrapper[4869]: I1001 15:20:14.185009 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/546e76f6-f453-481b-8115-369d6ff9326c-scripts\") pod \"546e76f6-f453-481b-8115-369d6ff9326c\" (UID: \"546e76f6-f453-481b-8115-369d6ff9326c\") " Oct 01 15:20:14 crc kubenswrapper[4869]: I1001 15:20:14.185179 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/546e76f6-f453-481b-8115-369d6ff9326c-logs\") pod \"546e76f6-f453-481b-8115-369d6ff9326c\" (UID: \"546e76f6-f453-481b-8115-369d6ff9326c\") " Oct 01 15:20:14 crc kubenswrapper[4869]: I1001 15:20:14.185295 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9rx94\" (UniqueName: \"kubernetes.io/projected/546e76f6-f453-481b-8115-369d6ff9326c-kube-api-access-9rx94\") pod \"546e76f6-f453-481b-8115-369d6ff9326c\" (UID: \"546e76f6-f453-481b-8115-369d6ff9326c\") " Oct 01 15:20:14 crc kubenswrapper[4869]: I1001 15:20:14.185430 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/546e76f6-f453-481b-8115-369d6ff9326c-combined-ca-bundle\") pod \"546e76f6-f453-481b-8115-369d6ff9326c\" (UID: \"546e76f6-f453-481b-8115-369d6ff9326c\") " Oct 01 15:20:14 crc kubenswrapper[4869]: I1001 15:20:14.185504 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/546e76f6-f453-481b-8115-369d6ff9326c-config-data\") pod \"546e76f6-f453-481b-8115-369d6ff9326c\" (UID: \"546e76f6-f453-481b-8115-369d6ff9326c\") " Oct 01 15:20:14 crc kubenswrapper[4869]: I1001 15:20:14.186187 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/546e76f6-f453-481b-8115-369d6ff9326c-logs" (OuterVolumeSpecName: "logs") pod "546e76f6-f453-481b-8115-369d6ff9326c" (UID: "546e76f6-f453-481b-8115-369d6ff9326c"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 15:20:14 crc kubenswrapper[4869]: I1001 15:20:14.191868 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/546e76f6-f453-481b-8115-369d6ff9326c-kube-api-access-9rx94" (OuterVolumeSpecName: "kube-api-access-9rx94") pod "546e76f6-f453-481b-8115-369d6ff9326c" (UID: "546e76f6-f453-481b-8115-369d6ff9326c"). InnerVolumeSpecName "kube-api-access-9rx94". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:20:14 crc kubenswrapper[4869]: I1001 15:20:14.204088 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/546e76f6-f453-481b-8115-369d6ff9326c-scripts" (OuterVolumeSpecName: "scripts") pod "546e76f6-f453-481b-8115-369d6ff9326c" (UID: "546e76f6-f453-481b-8115-369d6ff9326c"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:20:14 crc kubenswrapper[4869]: I1001 15:20:14.228391 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/546e76f6-f453-481b-8115-369d6ff9326c-config-data" (OuterVolumeSpecName: "config-data") pod "546e76f6-f453-481b-8115-369d6ff9326c" (UID: "546e76f6-f453-481b-8115-369d6ff9326c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:20:14 crc kubenswrapper[4869]: I1001 15:20:14.231507 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/546e76f6-f453-481b-8115-369d6ff9326c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "546e76f6-f453-481b-8115-369d6ff9326c" (UID: "546e76f6-f453-481b-8115-369d6ff9326c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:20:14 crc kubenswrapper[4869]: I1001 15:20:14.231939 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-24mhh" Oct 01 15:20:14 crc kubenswrapper[4869]: I1001 15:20:14.287891 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d6f159a-60ec-48de-87f6-d676877278c6-config-data\") pod \"0d6f159a-60ec-48de-87f6-d676877278c6\" (UID: \"0d6f159a-60ec-48de-87f6-d676877278c6\") " Oct 01 15:20:14 crc kubenswrapper[4869]: I1001 15:20:14.287946 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/0d6f159a-60ec-48de-87f6-d676877278c6-fernet-keys\") pod \"0d6f159a-60ec-48de-87f6-d676877278c6\" (UID: \"0d6f159a-60ec-48de-87f6-d676877278c6\") " Oct 01 15:20:14 crc kubenswrapper[4869]: I1001 15:20:14.288011 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/0d6f159a-60ec-48de-87f6-d676877278c6-credential-keys\") pod \"0d6f159a-60ec-48de-87f6-d676877278c6\" (UID: \"0d6f159a-60ec-48de-87f6-d676877278c6\") " Oct 01 15:20:14 crc kubenswrapper[4869]: I1001 15:20:14.288053 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0d6f159a-60ec-48de-87f6-d676877278c6-scripts\") pod \"0d6f159a-60ec-48de-87f6-d676877278c6\" (UID: \"0d6f159a-60ec-48de-87f6-d676877278c6\") " Oct 01 15:20:14 crc kubenswrapper[4869]: I1001 15:20:14.288116 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hp6vj\" (UniqueName: \"kubernetes.io/projected/0d6f159a-60ec-48de-87f6-d676877278c6-kube-api-access-hp6vj\") pod \"0d6f159a-60ec-48de-87f6-d676877278c6\" (UID: \"0d6f159a-60ec-48de-87f6-d676877278c6\") " Oct 01 15:20:14 crc kubenswrapper[4869]: I1001 15:20:14.288132 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d6f159a-60ec-48de-87f6-d676877278c6-combined-ca-bundle\") pod \"0d6f159a-60ec-48de-87f6-d676877278c6\" (UID: \"0d6f159a-60ec-48de-87f6-d676877278c6\") " Oct 01 15:20:14 crc kubenswrapper[4869]: I1001 15:20:14.288493 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/546e76f6-f453-481b-8115-369d6ff9326c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 15:20:14 crc kubenswrapper[4869]: I1001 
15:20:14.288510 4869 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/546e76f6-f453-481b-8115-369d6ff9326c-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 15:20:14 crc kubenswrapper[4869]: I1001 15:20:14.288518 4869 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/546e76f6-f453-481b-8115-369d6ff9326c-scripts\") on node \"crc\" DevicePath \"\"" Oct 01 15:20:14 crc kubenswrapper[4869]: I1001 15:20:14.288528 4869 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/546e76f6-f453-481b-8115-369d6ff9326c-logs\") on node \"crc\" DevicePath \"\"" Oct 01 15:20:14 crc kubenswrapper[4869]: I1001 15:20:14.288537 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9rx94\" (UniqueName: \"kubernetes.io/projected/546e76f6-f453-481b-8115-369d6ff9326c-kube-api-access-9rx94\") on node \"crc\" DevicePath \"\"" Oct 01 15:20:14 crc kubenswrapper[4869]: I1001 15:20:14.291740 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0d6f159a-60ec-48de-87f6-d676877278c6-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "0d6f159a-60ec-48de-87f6-d676877278c6" (UID: "0d6f159a-60ec-48de-87f6-d676877278c6"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:20:14 crc kubenswrapper[4869]: I1001 15:20:14.292514 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0d6f159a-60ec-48de-87f6-d676877278c6-kube-api-access-hp6vj" (OuterVolumeSpecName: "kube-api-access-hp6vj") pod "0d6f159a-60ec-48de-87f6-d676877278c6" (UID: "0d6f159a-60ec-48de-87f6-d676877278c6"). InnerVolumeSpecName "kube-api-access-hp6vj". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:20:14 crc kubenswrapper[4869]: I1001 15:20:14.292516 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0d6f159a-60ec-48de-87f6-d676877278c6-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "0d6f159a-60ec-48de-87f6-d676877278c6" (UID: "0d6f159a-60ec-48de-87f6-d676877278c6"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:20:14 crc kubenswrapper[4869]: I1001 15:20:14.302604 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0d6f159a-60ec-48de-87f6-d676877278c6-scripts" (OuterVolumeSpecName: "scripts") pod "0d6f159a-60ec-48de-87f6-d676877278c6" (UID: "0d6f159a-60ec-48de-87f6-d676877278c6"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:20:14 crc kubenswrapper[4869]: I1001 15:20:14.309889 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0d6f159a-60ec-48de-87f6-d676877278c6-config-data" (OuterVolumeSpecName: "config-data") pod "0d6f159a-60ec-48de-87f6-d676877278c6" (UID: "0d6f159a-60ec-48de-87f6-d676877278c6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:20:14 crc kubenswrapper[4869]: I1001 15:20:14.325870 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0d6f159a-60ec-48de-87f6-d676877278c6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0d6f159a-60ec-48de-87f6-d676877278c6" (UID: "0d6f159a-60ec-48de-87f6-d676877278c6"). 
InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:20:14 crc kubenswrapper[4869]: I1001 15:20:14.391172 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hp6vj\" (UniqueName: \"kubernetes.io/projected/0d6f159a-60ec-48de-87f6-d676877278c6-kube-api-access-hp6vj\") on node \"crc\" DevicePath \"\"" Oct 01 15:20:14 crc kubenswrapper[4869]: I1001 15:20:14.391245 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d6f159a-60ec-48de-87f6-d676877278c6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 15:20:14 crc kubenswrapper[4869]: I1001 15:20:14.391287 4869 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d6f159a-60ec-48de-87f6-d676877278c6-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 15:20:14 crc kubenswrapper[4869]: I1001 15:20:14.391304 4869 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/0d6f159a-60ec-48de-87f6-d676877278c6-fernet-keys\") on node \"crc\" DevicePath \"\"" Oct 01 15:20:14 crc kubenswrapper[4869]: I1001 15:20:14.391322 4869 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/0d6f159a-60ec-48de-87f6-d676877278c6-credential-keys\") on node \"crc\" DevicePath \"\"" Oct 01 15:20:14 crc kubenswrapper[4869]: I1001 15:20:14.391338 4869 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0d6f159a-60ec-48de-87f6-d676877278c6-scripts\") on node \"crc\" DevicePath \"\"" Oct 01 15:20:14 crc kubenswrapper[4869]: I1001 15:20:14.854049 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-24mhh" event={"ID":"0d6f159a-60ec-48de-87f6-d676877278c6","Type":"ContainerDied","Data":"4fab351c43e2f865c25839bdda4a4ef5d4d242a0c474c8f6a3de99835fdd93f9"} Oct 01 15:20:14 crc kubenswrapper[4869]: I1001 15:20:14.854107 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4fab351c43e2f865c25839bdda4a4ef5d4d242a0c474c8f6a3de99835fdd93f9" Oct 01 15:20:14 crc kubenswrapper[4869]: I1001 15:20:14.854121 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-24mhh" Oct 01 15:20:14 crc kubenswrapper[4869]: I1001 15:20:14.855928 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-25k96" event={"ID":"546e76f6-f453-481b-8115-369d6ff9326c","Type":"ContainerDied","Data":"6f72994f966da5a03597a2acba01ed115457273eb1801981333b602664360083"} Oct 01 15:20:14 crc kubenswrapper[4869]: I1001 15:20:14.855962 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6f72994f966da5a03597a2acba01ed115457273eb1801981333b602664360083" Oct 01 15:20:14 crc kubenswrapper[4869]: I1001 15:20:14.855933 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-25k96" Oct 01 15:20:14 crc kubenswrapper[4869]: I1001 15:20:14.858782 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" event={"ID":"a4b64f7f-0b03-4f47-965b-9fde048b735c","Type":"ContainerStarted","Data":"0674a6010e4abaf43ad0d52524028fcd0e0d167c67609073a8bff51bfec2aabf"} Oct 01 15:20:14 crc kubenswrapper[4869]: I1001 15:20:14.860244 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631","Type":"ContainerStarted","Data":"2a4d444a3d5e23bf38c8e2f1225cef437847d97521691a2eccf4ae028317772c"} Oct 01 15:20:14 crc kubenswrapper[4869]: I1001 15:20:14.862574 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-jmffx" event={"ID":"3dfd59b2-2698-41f5-95b7-f3c765173302","Type":"ContainerStarted","Data":"9886ef7bb5e4513d1bc4817813e0abdd353b827af210635d1572fa784cb072ad"} Oct 01 15:20:14 crc kubenswrapper[4869]: I1001 15:20:14.931572 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-jmffx" podStartSLOduration=5.656446419 podStartE2EDuration="10.931551481s" podCreationTimestamp="2025-10-01 15:20:04 +0000 UTC" firstStartedPulling="2025-10-01 15:20:08.346813131 +0000 UTC m=+917.493656247" lastFinishedPulling="2025-10-01 15:20:13.621918193 +0000 UTC m=+922.768761309" observedRunningTime="2025-10-01 15:20:14.899172783 +0000 UTC m=+924.046015899" watchObservedRunningTime="2025-10-01 15:20:14.931551481 +0000 UTC m=+924.078394607" Oct 01 15:20:14 crc kubenswrapper[4869]: I1001 15:20:14.967741 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-6b94b79f97-gtm9w"] Oct 01 15:20:14 crc kubenswrapper[4869]: E1001 15:20:14.968252 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="298c162a-4d72-4d7c-bd59-d5d5c8f7cf7f" containerName="init" Oct 01 15:20:14 crc kubenswrapper[4869]: I1001 15:20:14.968339 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="298c162a-4d72-4d7c-bd59-d5d5c8f7cf7f" containerName="init" Oct 01 15:20:14 crc kubenswrapper[4869]: E1001 15:20:14.968399 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="298c162a-4d72-4d7c-bd59-d5d5c8f7cf7f" containerName="dnsmasq-dns" Oct 01 15:20:14 crc kubenswrapper[4869]: I1001 15:20:14.968452 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="298c162a-4d72-4d7c-bd59-d5d5c8f7cf7f" containerName="dnsmasq-dns" Oct 01 15:20:14 crc kubenswrapper[4869]: E1001 15:20:14.968509 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="546e76f6-f453-481b-8115-369d6ff9326c" containerName="placement-db-sync" Oct 01 15:20:14 crc kubenswrapper[4869]: I1001 15:20:14.968559 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="546e76f6-f453-481b-8115-369d6ff9326c" containerName="placement-db-sync" Oct 01 15:20:14 crc kubenswrapper[4869]: E1001 15:20:14.968608 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d6f159a-60ec-48de-87f6-d676877278c6" containerName="keystone-bootstrap" Oct 01 15:20:14 crc kubenswrapper[4869]: I1001 15:20:14.968657 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d6f159a-60ec-48de-87f6-d676877278c6" containerName="keystone-bootstrap" Oct 01 15:20:14 crc kubenswrapper[4869]: I1001 15:20:14.968884 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="298c162a-4d72-4d7c-bd59-d5d5c8f7cf7f" containerName="dnsmasq-dns" 
Oct 01 15:20:14 crc kubenswrapper[4869]: I1001 15:20:14.969180 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="0d6f159a-60ec-48de-87f6-d676877278c6" containerName="keystone-bootstrap" Oct 01 15:20:14 crc kubenswrapper[4869]: I1001 15:20:14.969243 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="546e76f6-f453-481b-8115-369d6ff9326c" containerName="placement-db-sync" Oct 01 15:20:14 crc kubenswrapper[4869]: I1001 15:20:14.969832 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-6b94b79f97-gtm9w" Oct 01 15:20:14 crc kubenswrapper[4869]: I1001 15:20:14.977087 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc" Oct 01 15:20:14 crc kubenswrapper[4869]: I1001 15:20:14.977447 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Oct 01 15:20:14 crc kubenswrapper[4869]: I1001 15:20:14.977597 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Oct 01 15:20:14 crc kubenswrapper[4869]: I1001 15:20:14.977751 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc" Oct 01 15:20:14 crc kubenswrapper[4869]: I1001 15:20:14.978050 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Oct 01 15:20:14 crc kubenswrapper[4869]: I1001 15:20:14.978666 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-vhhr5" Oct 01 15:20:14 crc kubenswrapper[4869]: I1001 15:20:14.988733 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-6b94b79f97-gtm9w"] Oct 01 15:20:15 crc kubenswrapper[4869]: I1001 15:20:15.062738 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-8454446974-2h6ft"] Oct 01 15:20:15 crc kubenswrapper[4869]: I1001 15:20:15.066975 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-8454446974-2h6ft" Oct 01 15:20:15 crc kubenswrapper[4869]: I1001 15:20:15.069452 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-public-svc" Oct 01 15:20:15 crc kubenswrapper[4869]: I1001 15:20:15.069665 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Oct 01 15:20:15 crc kubenswrapper[4869]: I1001 15:20:15.069777 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-qj7j6" Oct 01 15:20:15 crc kubenswrapper[4869]: I1001 15:20:15.073677 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Oct 01 15:20:15 crc kubenswrapper[4869]: I1001 15:20:15.077219 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-internal-svc" Oct 01 15:20:15 crc kubenswrapper[4869]: I1001 15:20:15.085666 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-8454446974-2h6ft"] Oct 01 15:20:15 crc kubenswrapper[4869]: I1001 15:20:15.113620 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/543a3ce0-c8bc-45e3-bc25-617c4e65c08f-credential-keys\") pod \"keystone-6b94b79f97-gtm9w\" (UID: \"543a3ce0-c8bc-45e3-bc25-617c4e65c08f\") " pod="openstack/keystone-6b94b79f97-gtm9w" Oct 01 15:20:15 crc kubenswrapper[4869]: I1001 15:20:15.113928 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/543a3ce0-c8bc-45e3-bc25-617c4e65c08f-combined-ca-bundle\") pod \"keystone-6b94b79f97-gtm9w\" (UID: \"543a3ce0-c8bc-45e3-bc25-617c4e65c08f\") " pod="openstack/keystone-6b94b79f97-gtm9w" Oct 01 15:20:15 crc kubenswrapper[4869]: I1001 15:20:15.113961 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/543a3ce0-c8bc-45e3-bc25-617c4e65c08f-config-data\") pod \"keystone-6b94b79f97-gtm9w\" (UID: \"543a3ce0-c8bc-45e3-bc25-617c4e65c08f\") " pod="openstack/keystone-6b94b79f97-gtm9w" Oct 01 15:20:15 crc kubenswrapper[4869]: I1001 15:20:15.114045 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/543a3ce0-c8bc-45e3-bc25-617c4e65c08f-scripts\") pod \"keystone-6b94b79f97-gtm9w\" (UID: \"543a3ce0-c8bc-45e3-bc25-617c4e65c08f\") " pod="openstack/keystone-6b94b79f97-gtm9w" Oct 01 15:20:15 crc kubenswrapper[4869]: I1001 15:20:15.114074 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/543a3ce0-c8bc-45e3-bc25-617c4e65c08f-internal-tls-certs\") pod \"keystone-6b94b79f97-gtm9w\" (UID: \"543a3ce0-c8bc-45e3-bc25-617c4e65c08f\") " pod="openstack/keystone-6b94b79f97-gtm9w" Oct 01 15:20:15 crc kubenswrapper[4869]: I1001 15:20:15.114097 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tsktk\" (UniqueName: \"kubernetes.io/projected/543a3ce0-c8bc-45e3-bc25-617c4e65c08f-kube-api-access-tsktk\") pod \"keystone-6b94b79f97-gtm9w\" (UID: \"543a3ce0-c8bc-45e3-bc25-617c4e65c08f\") " pod="openstack/keystone-6b94b79f97-gtm9w" Oct 01 15:20:15 crc kubenswrapper[4869]: I1001 15:20:15.114134 
4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/543a3ce0-c8bc-45e3-bc25-617c4e65c08f-fernet-keys\") pod \"keystone-6b94b79f97-gtm9w\" (UID: \"543a3ce0-c8bc-45e3-bc25-617c4e65c08f\") " pod="openstack/keystone-6b94b79f97-gtm9w" Oct 01 15:20:15 crc kubenswrapper[4869]: I1001 15:20:15.114183 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/543a3ce0-c8bc-45e3-bc25-617c4e65c08f-public-tls-certs\") pod \"keystone-6b94b79f97-gtm9w\" (UID: \"543a3ce0-c8bc-45e3-bc25-617c4e65c08f\") " pod="openstack/keystone-6b94b79f97-gtm9w" Oct 01 15:20:15 crc kubenswrapper[4869]: I1001 15:20:15.215374 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/543a3ce0-c8bc-45e3-bc25-617c4e65c08f-scripts\") pod \"keystone-6b94b79f97-gtm9w\" (UID: \"543a3ce0-c8bc-45e3-bc25-617c4e65c08f\") " pod="openstack/keystone-6b94b79f97-gtm9w" Oct 01 15:20:15 crc kubenswrapper[4869]: I1001 15:20:15.215426 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/543a3ce0-c8bc-45e3-bc25-617c4e65c08f-internal-tls-certs\") pod \"keystone-6b94b79f97-gtm9w\" (UID: \"543a3ce0-c8bc-45e3-bc25-617c4e65c08f\") " pod="openstack/keystone-6b94b79f97-gtm9w" Oct 01 15:20:15 crc kubenswrapper[4869]: I1001 15:20:15.215459 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tsktk\" (UniqueName: \"kubernetes.io/projected/543a3ce0-c8bc-45e3-bc25-617c4e65c08f-kube-api-access-tsktk\") pod \"keystone-6b94b79f97-gtm9w\" (UID: \"543a3ce0-c8bc-45e3-bc25-617c4e65c08f\") " pod="openstack/keystone-6b94b79f97-gtm9w" Oct 01 15:20:15 crc kubenswrapper[4869]: I1001 15:20:15.215490 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/77f02e38-9109-4d44-b448-b29e38a252d1-combined-ca-bundle\") pod \"placement-8454446974-2h6ft\" (UID: \"77f02e38-9109-4d44-b448-b29e38a252d1\") " pod="openstack/placement-8454446974-2h6ft" Oct 01 15:20:15 crc kubenswrapper[4869]: I1001 15:20:15.215874 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/543a3ce0-c8bc-45e3-bc25-617c4e65c08f-fernet-keys\") pod \"keystone-6b94b79f97-gtm9w\" (UID: \"543a3ce0-c8bc-45e3-bc25-617c4e65c08f\") " pod="openstack/keystone-6b94b79f97-gtm9w" Oct 01 15:20:15 crc kubenswrapper[4869]: I1001 15:20:15.216565 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/77f02e38-9109-4d44-b448-b29e38a252d1-public-tls-certs\") pod \"placement-8454446974-2h6ft\" (UID: \"77f02e38-9109-4d44-b448-b29e38a252d1\") " pod="openstack/placement-8454446974-2h6ft" Oct 01 15:20:15 crc kubenswrapper[4869]: I1001 15:20:15.216623 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/543a3ce0-c8bc-45e3-bc25-617c4e65c08f-public-tls-certs\") pod \"keystone-6b94b79f97-gtm9w\" (UID: \"543a3ce0-c8bc-45e3-bc25-617c4e65c08f\") " pod="openstack/keystone-6b94b79f97-gtm9w" Oct 01 15:20:15 crc kubenswrapper[4869]: I1001 15:20:15.216730 4869 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/543a3ce0-c8bc-45e3-bc25-617c4e65c08f-credential-keys\") pod \"keystone-6b94b79f97-gtm9w\" (UID: \"543a3ce0-c8bc-45e3-bc25-617c4e65c08f\") " pod="openstack/keystone-6b94b79f97-gtm9w" Oct 01 15:20:15 crc kubenswrapper[4869]: I1001 15:20:15.216763 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mfcz2\" (UniqueName: \"kubernetes.io/projected/77f02e38-9109-4d44-b448-b29e38a252d1-kube-api-access-mfcz2\") pod \"placement-8454446974-2h6ft\" (UID: \"77f02e38-9109-4d44-b448-b29e38a252d1\") " pod="openstack/placement-8454446974-2h6ft" Oct 01 15:20:15 crc kubenswrapper[4869]: I1001 15:20:15.216803 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/543a3ce0-c8bc-45e3-bc25-617c4e65c08f-combined-ca-bundle\") pod \"keystone-6b94b79f97-gtm9w\" (UID: \"543a3ce0-c8bc-45e3-bc25-617c4e65c08f\") " pod="openstack/keystone-6b94b79f97-gtm9w" Oct 01 15:20:15 crc kubenswrapper[4869]: I1001 15:20:15.216834 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/543a3ce0-c8bc-45e3-bc25-617c4e65c08f-config-data\") pod \"keystone-6b94b79f97-gtm9w\" (UID: \"543a3ce0-c8bc-45e3-bc25-617c4e65c08f\") " pod="openstack/keystone-6b94b79f97-gtm9w" Oct 01 15:20:15 crc kubenswrapper[4869]: I1001 15:20:15.216864 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/77f02e38-9109-4d44-b448-b29e38a252d1-config-data\") pod \"placement-8454446974-2h6ft\" (UID: \"77f02e38-9109-4d44-b448-b29e38a252d1\") " pod="openstack/placement-8454446974-2h6ft" Oct 01 15:20:15 crc kubenswrapper[4869]: I1001 15:20:15.216891 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/77f02e38-9109-4d44-b448-b29e38a252d1-logs\") pod \"placement-8454446974-2h6ft\" (UID: \"77f02e38-9109-4d44-b448-b29e38a252d1\") " pod="openstack/placement-8454446974-2h6ft" Oct 01 15:20:15 crc kubenswrapper[4869]: I1001 15:20:15.216919 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/77f02e38-9109-4d44-b448-b29e38a252d1-scripts\") pod \"placement-8454446974-2h6ft\" (UID: \"77f02e38-9109-4d44-b448-b29e38a252d1\") " pod="openstack/placement-8454446974-2h6ft" Oct 01 15:20:15 crc kubenswrapper[4869]: I1001 15:20:15.216998 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/77f02e38-9109-4d44-b448-b29e38a252d1-internal-tls-certs\") pod \"placement-8454446974-2h6ft\" (UID: \"77f02e38-9109-4d44-b448-b29e38a252d1\") " pod="openstack/placement-8454446974-2h6ft" Oct 01 15:20:15 crc kubenswrapper[4869]: I1001 15:20:15.221622 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/543a3ce0-c8bc-45e3-bc25-617c4e65c08f-public-tls-certs\") pod \"keystone-6b94b79f97-gtm9w\" (UID: \"543a3ce0-c8bc-45e3-bc25-617c4e65c08f\") " pod="openstack/keystone-6b94b79f97-gtm9w" Oct 01 15:20:15 crc kubenswrapper[4869]: I1001 15:20:15.223653 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/543a3ce0-c8bc-45e3-bc25-617c4e65c08f-internal-tls-certs\") pod \"keystone-6b94b79f97-gtm9w\" (UID: \"543a3ce0-c8bc-45e3-bc25-617c4e65c08f\") " pod="openstack/keystone-6b94b79f97-gtm9w" Oct 01 15:20:15 crc kubenswrapper[4869]: I1001 15:20:15.224158 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/543a3ce0-c8bc-45e3-bc25-617c4e65c08f-config-data\") pod \"keystone-6b94b79f97-gtm9w\" (UID: \"543a3ce0-c8bc-45e3-bc25-617c4e65c08f\") " pod="openstack/keystone-6b94b79f97-gtm9w" Oct 01 15:20:15 crc kubenswrapper[4869]: I1001 15:20:15.224577 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/543a3ce0-c8bc-45e3-bc25-617c4e65c08f-scripts\") pod \"keystone-6b94b79f97-gtm9w\" (UID: \"543a3ce0-c8bc-45e3-bc25-617c4e65c08f\") " pod="openstack/keystone-6b94b79f97-gtm9w" Oct 01 15:20:15 crc kubenswrapper[4869]: I1001 15:20:15.225361 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/543a3ce0-c8bc-45e3-bc25-617c4e65c08f-fernet-keys\") pod \"keystone-6b94b79f97-gtm9w\" (UID: \"543a3ce0-c8bc-45e3-bc25-617c4e65c08f\") " pod="openstack/keystone-6b94b79f97-gtm9w" Oct 01 15:20:15 crc kubenswrapper[4869]: I1001 15:20:15.229205 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/543a3ce0-c8bc-45e3-bc25-617c4e65c08f-credential-keys\") pod \"keystone-6b94b79f97-gtm9w\" (UID: \"543a3ce0-c8bc-45e3-bc25-617c4e65c08f\") " pod="openstack/keystone-6b94b79f97-gtm9w" Oct 01 15:20:15 crc kubenswrapper[4869]: I1001 15:20:15.230771 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/543a3ce0-c8bc-45e3-bc25-617c4e65c08f-combined-ca-bundle\") pod \"keystone-6b94b79f97-gtm9w\" (UID: \"543a3ce0-c8bc-45e3-bc25-617c4e65c08f\") " pod="openstack/keystone-6b94b79f97-gtm9w" Oct 01 15:20:15 crc kubenswrapper[4869]: I1001 15:20:15.233327 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tsktk\" (UniqueName: \"kubernetes.io/projected/543a3ce0-c8bc-45e3-bc25-617c4e65c08f-kube-api-access-tsktk\") pod \"keystone-6b94b79f97-gtm9w\" (UID: \"543a3ce0-c8bc-45e3-bc25-617c4e65c08f\") " pod="openstack/keystone-6b94b79f97-gtm9w" Oct 01 15:20:15 crc kubenswrapper[4869]: I1001 15:20:15.299348 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-6b94b79f97-gtm9w" Oct 01 15:20:15 crc kubenswrapper[4869]: I1001 15:20:15.319244 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mfcz2\" (UniqueName: \"kubernetes.io/projected/77f02e38-9109-4d44-b448-b29e38a252d1-kube-api-access-mfcz2\") pod \"placement-8454446974-2h6ft\" (UID: \"77f02e38-9109-4d44-b448-b29e38a252d1\") " pod="openstack/placement-8454446974-2h6ft" Oct 01 15:20:15 crc kubenswrapper[4869]: I1001 15:20:15.319313 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/77f02e38-9109-4d44-b448-b29e38a252d1-config-data\") pod \"placement-8454446974-2h6ft\" (UID: \"77f02e38-9109-4d44-b448-b29e38a252d1\") " pod="openstack/placement-8454446974-2h6ft" Oct 01 15:20:15 crc kubenswrapper[4869]: I1001 15:20:15.319330 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/77f02e38-9109-4d44-b448-b29e38a252d1-logs\") pod \"placement-8454446974-2h6ft\" (UID: \"77f02e38-9109-4d44-b448-b29e38a252d1\") " pod="openstack/placement-8454446974-2h6ft" Oct 01 15:20:15 crc kubenswrapper[4869]: I1001 15:20:15.319350 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/77f02e38-9109-4d44-b448-b29e38a252d1-scripts\") pod \"placement-8454446974-2h6ft\" (UID: \"77f02e38-9109-4d44-b448-b29e38a252d1\") " pod="openstack/placement-8454446974-2h6ft" Oct 01 15:20:15 crc kubenswrapper[4869]: I1001 15:20:15.319387 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/77f02e38-9109-4d44-b448-b29e38a252d1-internal-tls-certs\") pod \"placement-8454446974-2h6ft\" (UID: \"77f02e38-9109-4d44-b448-b29e38a252d1\") " pod="openstack/placement-8454446974-2h6ft" Oct 01 15:20:15 crc kubenswrapper[4869]: I1001 15:20:15.319450 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/77f02e38-9109-4d44-b448-b29e38a252d1-combined-ca-bundle\") pod \"placement-8454446974-2h6ft\" (UID: \"77f02e38-9109-4d44-b448-b29e38a252d1\") " pod="openstack/placement-8454446974-2h6ft" Oct 01 15:20:15 crc kubenswrapper[4869]: I1001 15:20:15.319506 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/77f02e38-9109-4d44-b448-b29e38a252d1-public-tls-certs\") pod \"placement-8454446974-2h6ft\" (UID: \"77f02e38-9109-4d44-b448-b29e38a252d1\") " pod="openstack/placement-8454446974-2h6ft" Oct 01 15:20:15 crc kubenswrapper[4869]: I1001 15:20:15.320196 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/77f02e38-9109-4d44-b448-b29e38a252d1-logs\") pod \"placement-8454446974-2h6ft\" (UID: \"77f02e38-9109-4d44-b448-b29e38a252d1\") " pod="openstack/placement-8454446974-2h6ft" Oct 01 15:20:15 crc kubenswrapper[4869]: I1001 15:20:15.323688 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/77f02e38-9109-4d44-b448-b29e38a252d1-scripts\") pod \"placement-8454446974-2h6ft\" (UID: \"77f02e38-9109-4d44-b448-b29e38a252d1\") " pod="openstack/placement-8454446974-2h6ft" Oct 01 15:20:15 crc kubenswrapper[4869]: I1001 15:20:15.323801 4869 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/77f02e38-9109-4d44-b448-b29e38a252d1-public-tls-certs\") pod \"placement-8454446974-2h6ft\" (UID: \"77f02e38-9109-4d44-b448-b29e38a252d1\") " pod="openstack/placement-8454446974-2h6ft" Oct 01 15:20:15 crc kubenswrapper[4869]: I1001 15:20:15.323920 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/77f02e38-9109-4d44-b448-b29e38a252d1-combined-ca-bundle\") pod \"placement-8454446974-2h6ft\" (UID: \"77f02e38-9109-4d44-b448-b29e38a252d1\") " pod="openstack/placement-8454446974-2h6ft" Oct 01 15:20:15 crc kubenswrapper[4869]: I1001 15:20:15.325410 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/77f02e38-9109-4d44-b448-b29e38a252d1-config-data\") pod \"placement-8454446974-2h6ft\" (UID: \"77f02e38-9109-4d44-b448-b29e38a252d1\") " pod="openstack/placement-8454446974-2h6ft" Oct 01 15:20:15 crc kubenswrapper[4869]: I1001 15:20:15.327942 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/77f02e38-9109-4d44-b448-b29e38a252d1-internal-tls-certs\") pod \"placement-8454446974-2h6ft\" (UID: \"77f02e38-9109-4d44-b448-b29e38a252d1\") " pod="openstack/placement-8454446974-2h6ft" Oct 01 15:20:15 crc kubenswrapper[4869]: I1001 15:20:15.336806 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mfcz2\" (UniqueName: \"kubernetes.io/projected/77f02e38-9109-4d44-b448-b29e38a252d1-kube-api-access-mfcz2\") pod \"placement-8454446974-2h6ft\" (UID: \"77f02e38-9109-4d44-b448-b29e38a252d1\") " pod="openstack/placement-8454446974-2h6ft" Oct 01 15:20:15 crc kubenswrapper[4869]: I1001 15:20:15.392092 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-8454446974-2h6ft" Oct 01 15:20:15 crc kubenswrapper[4869]: I1001 15:20:15.823298 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-6b94b79f97-gtm9w"] Oct 01 15:20:15 crc kubenswrapper[4869]: W1001 15:20:15.830393 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod543a3ce0_c8bc_45e3_bc25_617c4e65c08f.slice/crio-a4f645e8aace57b821d1cb8bbc339a39ac1fc086c9564fcc968695905547111b WatchSource:0}: Error finding container a4f645e8aace57b821d1cb8bbc339a39ac1fc086c9564fcc968695905547111b: Status 404 returned error can't find the container with id a4f645e8aace57b821d1cb8bbc339a39ac1fc086c9564fcc968695905547111b Oct 01 15:20:15 crc kubenswrapper[4869]: I1001 15:20:15.883446 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-6b94b79f97-gtm9w" event={"ID":"543a3ce0-c8bc-45e3-bc25-617c4e65c08f","Type":"ContainerStarted","Data":"a4f645e8aace57b821d1cb8bbc339a39ac1fc086c9564fcc968695905547111b"} Oct 01 15:20:15 crc kubenswrapper[4869]: I1001 15:20:15.997557 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-8454446974-2h6ft"] Oct 01 15:20:16 crc kubenswrapper[4869]: W1001 15:20:16.002045 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod77f02e38_9109_4d44_b448_b29e38a252d1.slice/crio-702724f57d6b5f5ee7fce66ee6cb978884c0992adc4ce594db1c4131eb6cef17 WatchSource:0}: Error finding container 702724f57d6b5f5ee7fce66ee6cb978884c0992adc4ce594db1c4131eb6cef17: Status 404 returned error can't find the container with id 702724f57d6b5f5ee7fce66ee6cb978884c0992adc4ce594db1c4131eb6cef17 Oct 01 15:20:16 crc kubenswrapper[4869]: I1001 15:20:16.895296 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-6b94b79f97-gtm9w" event={"ID":"543a3ce0-c8bc-45e3-bc25-617c4e65c08f","Type":"ContainerStarted","Data":"f4ab15860f4feec9890f141f15c97091a412f231e2fdc63455b835c2af9646b1"} Oct 01 15:20:16 crc kubenswrapper[4869]: I1001 15:20:16.896807 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-6b94b79f97-gtm9w" Oct 01 15:20:16 crc kubenswrapper[4869]: I1001 15:20:16.906134 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-8454446974-2h6ft" event={"ID":"77f02e38-9109-4d44-b448-b29e38a252d1","Type":"ContainerStarted","Data":"2d79033d294a31eff99d7c4488570309fa88102aaaefd3309d6a03c381442a83"} Oct 01 15:20:16 crc kubenswrapper[4869]: I1001 15:20:16.906204 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-8454446974-2h6ft" event={"ID":"77f02e38-9109-4d44-b448-b29e38a252d1","Type":"ContainerStarted","Data":"5fe9f7e32e6b3f4246f81f901f7e911aee1b7fa0a13ef5873ca2dea96f5e26f2"} Oct 01 15:20:16 crc kubenswrapper[4869]: I1001 15:20:16.906217 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-8454446974-2h6ft" event={"ID":"77f02e38-9109-4d44-b448-b29e38a252d1","Type":"ContainerStarted","Data":"702724f57d6b5f5ee7fce66ee6cb978884c0992adc4ce594db1c4131eb6cef17"} Oct 01 15:20:16 crc kubenswrapper[4869]: I1001 15:20:16.906602 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-8454446974-2h6ft" Oct 01 15:20:16 crc kubenswrapper[4869]: I1001 15:20:16.906686 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack/placement-8454446974-2h6ft" Oct 01 15:20:16 crc kubenswrapper[4869]: I1001 15:20:16.916340 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-6b94b79f97-gtm9w" podStartSLOduration=2.9163271699999997 podStartE2EDuration="2.91632717s" podCreationTimestamp="2025-10-01 15:20:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:20:16.913708254 +0000 UTC m=+926.060551370" watchObservedRunningTime="2025-10-01 15:20:16.91632717 +0000 UTC m=+926.063170286" Oct 01 15:20:16 crc kubenswrapper[4869]: I1001 15:20:16.933973 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-8454446974-2h6ft" podStartSLOduration=1.933952795 podStartE2EDuration="1.933952795s" podCreationTimestamp="2025-10-01 15:20:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:20:16.93135929 +0000 UTC m=+926.078202426" watchObservedRunningTime="2025-10-01 15:20:16.933952795 +0000 UTC m=+926.080795911" Oct 01 15:20:18 crc kubenswrapper[4869]: I1001 15:20:18.924406 4869 generic.go:334] "Generic (PLEG): container finished" podID="3dfd59b2-2698-41f5-95b7-f3c765173302" containerID="9886ef7bb5e4513d1bc4817813e0abdd353b827af210635d1572fa784cb072ad" exitCode=0 Oct 01 15:20:18 crc kubenswrapper[4869]: I1001 15:20:18.924508 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-jmffx" event={"ID":"3dfd59b2-2698-41f5-95b7-f3c765173302","Type":"ContainerDied","Data":"9886ef7bb5e4513d1bc4817813e0abdd353b827af210635d1572fa784cb072ad"} Oct 01 15:20:19 crc kubenswrapper[4869]: I1001 15:20:19.211803 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-5766b74c9d-wpxpf" Oct 01 15:20:19 crc kubenswrapper[4869]: I1001 15:20:19.211843 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-5766b74c9d-wpxpf" Oct 01 15:20:19 crc kubenswrapper[4869]: I1001 15:20:19.214305 4869 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-5766b74c9d-wpxpf" podUID="011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.140:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.140:8443: connect: connection refused" Oct 01 15:20:19 crc kubenswrapper[4869]: I1001 15:20:19.268430 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-5f66f6967d-mnbqz" Oct 01 15:20:19 crc kubenswrapper[4869]: I1001 15:20:19.269020 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-5f66f6967d-mnbqz" Oct 01 15:20:19 crc kubenswrapper[4869]: I1001 15:20:19.272805 4869 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-5f66f6967d-mnbqz" podUID="eb62e045-ca51-4b33-a63d-9c53b247cc91" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.141:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.141:8443: connect: connection refused" Oct 01 15:20:20 crc kubenswrapper[4869]: I1001 15:20:20.636278 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-jmffx" Oct 01 15:20:20 crc kubenswrapper[4869]: I1001 15:20:20.732154 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nl28q\" (UniqueName: \"kubernetes.io/projected/3dfd59b2-2698-41f5-95b7-f3c765173302-kube-api-access-nl28q\") pod \"3dfd59b2-2698-41f5-95b7-f3c765173302\" (UID: \"3dfd59b2-2698-41f5-95b7-f3c765173302\") " Oct 01 15:20:20 crc kubenswrapper[4869]: I1001 15:20:20.732299 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3dfd59b2-2698-41f5-95b7-f3c765173302-combined-ca-bundle\") pod \"3dfd59b2-2698-41f5-95b7-f3c765173302\" (UID: \"3dfd59b2-2698-41f5-95b7-f3c765173302\") " Oct 01 15:20:20 crc kubenswrapper[4869]: I1001 15:20:20.732384 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/3dfd59b2-2698-41f5-95b7-f3c765173302-db-sync-config-data\") pod \"3dfd59b2-2698-41f5-95b7-f3c765173302\" (UID: \"3dfd59b2-2698-41f5-95b7-f3c765173302\") " Oct 01 15:20:20 crc kubenswrapper[4869]: I1001 15:20:20.751051 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3dfd59b2-2698-41f5-95b7-f3c765173302-kube-api-access-nl28q" (OuterVolumeSpecName: "kube-api-access-nl28q") pod "3dfd59b2-2698-41f5-95b7-f3c765173302" (UID: "3dfd59b2-2698-41f5-95b7-f3c765173302"). InnerVolumeSpecName "kube-api-access-nl28q". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:20:20 crc kubenswrapper[4869]: I1001 15:20:20.751167 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3dfd59b2-2698-41f5-95b7-f3c765173302-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "3dfd59b2-2698-41f5-95b7-f3c765173302" (UID: "3dfd59b2-2698-41f5-95b7-f3c765173302"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:20:20 crc kubenswrapper[4869]: I1001 15:20:20.756134 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3dfd59b2-2698-41f5-95b7-f3c765173302-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3dfd59b2-2698-41f5-95b7-f3c765173302" (UID: "3dfd59b2-2698-41f5-95b7-f3c765173302"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:20:20 crc kubenswrapper[4869]: I1001 15:20:20.837039 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nl28q\" (UniqueName: \"kubernetes.io/projected/3dfd59b2-2698-41f5-95b7-f3c765173302-kube-api-access-nl28q\") on node \"crc\" DevicePath \"\"" Oct 01 15:20:20 crc kubenswrapper[4869]: I1001 15:20:20.837094 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3dfd59b2-2698-41f5-95b7-f3c765173302-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 15:20:20 crc kubenswrapper[4869]: I1001 15:20:20.837104 4869 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/3dfd59b2-2698-41f5-95b7-f3c765173302-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 15:20:20 crc kubenswrapper[4869]: I1001 15:20:20.944198 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-jmffx" event={"ID":"3dfd59b2-2698-41f5-95b7-f3c765173302","Type":"ContainerDied","Data":"8a24ac6578c19fbc54e74de74857994841f50cd9441036315d960546c2644b3e"} Oct 01 15:20:20 crc kubenswrapper[4869]: I1001 15:20:20.944304 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8a24ac6578c19fbc54e74de74857994841f50cd9441036315d960546c2644b3e" Oct 01 15:20:20 crc kubenswrapper[4869]: I1001 15:20:20.944395 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-jmffx" Oct 01 15:20:20 crc kubenswrapper[4869]: I1001 15:20:20.948601 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631","Type":"ContainerStarted","Data":"99d0e76b22b29e4947bfb92dfa7c9861448ea38250b4b8538bda3b9c670ddc17"} Oct 01 15:20:21 crc kubenswrapper[4869]: I1001 15:20:21.316168 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-7b96966b79-mtqf2"] Oct 01 15:20:21 crc kubenswrapper[4869]: E1001 15:20:21.316811 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3dfd59b2-2698-41f5-95b7-f3c765173302" containerName="barbican-db-sync" Oct 01 15:20:21 crc kubenswrapper[4869]: I1001 15:20:21.316826 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="3dfd59b2-2698-41f5-95b7-f3c765173302" containerName="barbican-db-sync" Oct 01 15:20:21 crc kubenswrapper[4869]: I1001 15:20:21.316993 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="3dfd59b2-2698-41f5-95b7-f3c765173302" containerName="barbican-db-sync" Oct 01 15:20:21 crc kubenswrapper[4869]: I1001 15:20:21.320819 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-worker-7b96966b79-mtqf2" Oct 01 15:20:21 crc kubenswrapper[4869]: I1001 15:20:21.323280 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Oct 01 15:20:21 crc kubenswrapper[4869]: I1001 15:20:21.323446 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data" Oct 01 15:20:21 crc kubenswrapper[4869]: I1001 15:20:21.324978 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-8f4h6" Oct 01 15:20:21 crc kubenswrapper[4869]: I1001 15:20:21.335167 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-7b96966b79-mtqf2"] Oct 01 15:20:21 crc kubenswrapper[4869]: I1001 15:20:21.415428 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-6d6689c8d-7r6k4"] Oct 01 15:20:21 crc kubenswrapper[4869]: I1001 15:20:21.416974 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-6d6689c8d-7r6k4" Oct 01 15:20:21 crc kubenswrapper[4869]: I1001 15:20:21.421089 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data" Oct 01 15:20:21 crc kubenswrapper[4869]: I1001 15:20:21.428785 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-6d6689c8d-7r6k4"] Oct 01 15:20:21 crc kubenswrapper[4869]: I1001 15:20:21.455208 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b0cf993c-1e4c-425b-8266-e087119e45b2-logs\") pod \"barbican-worker-7b96966b79-mtqf2\" (UID: \"b0cf993c-1e4c-425b-8266-e087119e45b2\") " pod="openstack/barbican-worker-7b96966b79-mtqf2" Oct 01 15:20:21 crc kubenswrapper[4869]: I1001 15:20:21.455247 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b0cf993c-1e4c-425b-8266-e087119e45b2-combined-ca-bundle\") pod \"barbican-worker-7b96966b79-mtqf2\" (UID: \"b0cf993c-1e4c-425b-8266-e087119e45b2\") " pod="openstack/barbican-worker-7b96966b79-mtqf2" Oct 01 15:20:21 crc kubenswrapper[4869]: I1001 15:20:21.455292 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b0cf993c-1e4c-425b-8266-e087119e45b2-config-data\") pod \"barbican-worker-7b96966b79-mtqf2\" (UID: \"b0cf993c-1e4c-425b-8266-e087119e45b2\") " pod="openstack/barbican-worker-7b96966b79-mtqf2" Oct 01 15:20:21 crc kubenswrapper[4869]: I1001 15:20:21.455324 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b0cf993c-1e4c-425b-8266-e087119e45b2-config-data-custom\") pod \"barbican-worker-7b96966b79-mtqf2\" (UID: \"b0cf993c-1e4c-425b-8266-e087119e45b2\") " pod="openstack/barbican-worker-7b96966b79-mtqf2" Oct 01 15:20:21 crc kubenswrapper[4869]: I1001 15:20:21.455375 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2jvcp\" (UniqueName: \"kubernetes.io/projected/b0cf993c-1e4c-425b-8266-e087119e45b2-kube-api-access-2jvcp\") pod \"barbican-worker-7b96966b79-mtqf2\" (UID: \"b0cf993c-1e4c-425b-8266-e087119e45b2\") " 
pod="openstack/barbican-worker-7b96966b79-mtqf2" Oct 01 15:20:21 crc kubenswrapper[4869]: I1001 15:20:21.532318 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-64dff96bf7-6fjfz"] Oct 01 15:20:21 crc kubenswrapper[4869]: I1001 15:20:21.537598 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-64dff96bf7-6fjfz" Oct 01 15:20:21 crc kubenswrapper[4869]: I1001 15:20:21.557875 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/af712290-aa09-4efa-80ac-6f655752332e-config-data-custom\") pod \"barbican-keystone-listener-6d6689c8d-7r6k4\" (UID: \"af712290-aa09-4efa-80ac-6f655752332e\") " pod="openstack/barbican-keystone-listener-6d6689c8d-7r6k4" Oct 01 15:20:21 crc kubenswrapper[4869]: I1001 15:20:21.557924 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af712290-aa09-4efa-80ac-6f655752332e-combined-ca-bundle\") pod \"barbican-keystone-listener-6d6689c8d-7r6k4\" (UID: \"af712290-aa09-4efa-80ac-6f655752332e\") " pod="openstack/barbican-keystone-listener-6d6689c8d-7r6k4" Oct 01 15:20:21 crc kubenswrapper[4869]: I1001 15:20:21.557953 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b0cf993c-1e4c-425b-8266-e087119e45b2-logs\") pod \"barbican-worker-7b96966b79-mtqf2\" (UID: \"b0cf993c-1e4c-425b-8266-e087119e45b2\") " pod="openstack/barbican-worker-7b96966b79-mtqf2" Oct 01 15:20:21 crc kubenswrapper[4869]: I1001 15:20:21.557969 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b0cf993c-1e4c-425b-8266-e087119e45b2-combined-ca-bundle\") pod \"barbican-worker-7b96966b79-mtqf2\" (UID: \"b0cf993c-1e4c-425b-8266-e087119e45b2\") " pod="openstack/barbican-worker-7b96966b79-mtqf2" Oct 01 15:20:21 crc kubenswrapper[4869]: I1001 15:20:21.557993 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b0cf993c-1e4c-425b-8266-e087119e45b2-config-data\") pod \"barbican-worker-7b96966b79-mtqf2\" (UID: \"b0cf993c-1e4c-425b-8266-e087119e45b2\") " pod="openstack/barbican-worker-7b96966b79-mtqf2" Oct 01 15:20:21 crc kubenswrapper[4869]: I1001 15:20:21.558021 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/af712290-aa09-4efa-80ac-6f655752332e-config-data\") pod \"barbican-keystone-listener-6d6689c8d-7r6k4\" (UID: \"af712290-aa09-4efa-80ac-6f655752332e\") " pod="openstack/barbican-keystone-listener-6d6689c8d-7r6k4" Oct 01 15:20:21 crc kubenswrapper[4869]: I1001 15:20:21.558036 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b0cf993c-1e4c-425b-8266-e087119e45b2-config-data-custom\") pod \"barbican-worker-7b96966b79-mtqf2\" (UID: \"b0cf993c-1e4c-425b-8266-e087119e45b2\") " pod="openstack/barbican-worker-7b96966b79-mtqf2" Oct 01 15:20:21 crc kubenswrapper[4869]: I1001 15:20:21.558052 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xrv2k\" (UniqueName: 
\"kubernetes.io/projected/af712290-aa09-4efa-80ac-6f655752332e-kube-api-access-xrv2k\") pod \"barbican-keystone-listener-6d6689c8d-7r6k4\" (UID: \"af712290-aa09-4efa-80ac-6f655752332e\") " pod="openstack/barbican-keystone-listener-6d6689c8d-7r6k4" Oct 01 15:20:21 crc kubenswrapper[4869]: I1001 15:20:21.558105 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2jvcp\" (UniqueName: \"kubernetes.io/projected/b0cf993c-1e4c-425b-8266-e087119e45b2-kube-api-access-2jvcp\") pod \"barbican-worker-7b96966b79-mtqf2\" (UID: \"b0cf993c-1e4c-425b-8266-e087119e45b2\") " pod="openstack/barbican-worker-7b96966b79-mtqf2" Oct 01 15:20:21 crc kubenswrapper[4869]: I1001 15:20:21.558121 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/af712290-aa09-4efa-80ac-6f655752332e-logs\") pod \"barbican-keystone-listener-6d6689c8d-7r6k4\" (UID: \"af712290-aa09-4efa-80ac-6f655752332e\") " pod="openstack/barbican-keystone-listener-6d6689c8d-7r6k4" Oct 01 15:20:21 crc kubenswrapper[4869]: I1001 15:20:21.558633 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b0cf993c-1e4c-425b-8266-e087119e45b2-logs\") pod \"barbican-worker-7b96966b79-mtqf2\" (UID: \"b0cf993c-1e4c-425b-8266-e087119e45b2\") " pod="openstack/barbican-worker-7b96966b79-mtqf2" Oct 01 15:20:21 crc kubenswrapper[4869]: I1001 15:20:21.561203 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-64dff96bf7-6fjfz"] Oct 01 15:20:21 crc kubenswrapper[4869]: I1001 15:20:21.568867 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b0cf993c-1e4c-425b-8266-e087119e45b2-config-data-custom\") pod \"barbican-worker-7b96966b79-mtqf2\" (UID: \"b0cf993c-1e4c-425b-8266-e087119e45b2\") " pod="openstack/barbican-worker-7b96966b79-mtqf2" Oct 01 15:20:21 crc kubenswrapper[4869]: I1001 15:20:21.589073 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b0cf993c-1e4c-425b-8266-e087119e45b2-combined-ca-bundle\") pod \"barbican-worker-7b96966b79-mtqf2\" (UID: \"b0cf993c-1e4c-425b-8266-e087119e45b2\") " pod="openstack/barbican-worker-7b96966b79-mtqf2" Oct 01 15:20:21 crc kubenswrapper[4869]: I1001 15:20:21.589379 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b0cf993c-1e4c-425b-8266-e087119e45b2-config-data\") pod \"barbican-worker-7b96966b79-mtqf2\" (UID: \"b0cf993c-1e4c-425b-8266-e087119e45b2\") " pod="openstack/barbican-worker-7b96966b79-mtqf2" Oct 01 15:20:21 crc kubenswrapper[4869]: I1001 15:20:21.625094 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2jvcp\" (UniqueName: \"kubernetes.io/projected/b0cf993c-1e4c-425b-8266-e087119e45b2-kube-api-access-2jvcp\") pod \"barbican-worker-7b96966b79-mtqf2\" (UID: \"b0cf993c-1e4c-425b-8266-e087119e45b2\") " pod="openstack/barbican-worker-7b96966b79-mtqf2" Oct 01 15:20:21 crc kubenswrapper[4869]: I1001 15:20:21.660285 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/11b54042-d5f6-43e4-92c7-560bc7f55f33-config\") pod \"dnsmasq-dns-64dff96bf7-6fjfz\" (UID: \"11b54042-d5f6-43e4-92c7-560bc7f55f33\") " 
pod="openstack/dnsmasq-dns-64dff96bf7-6fjfz" Oct 01 15:20:21 crc kubenswrapper[4869]: I1001 15:20:21.660347 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/11b54042-d5f6-43e4-92c7-560bc7f55f33-dns-svc\") pod \"dnsmasq-dns-64dff96bf7-6fjfz\" (UID: \"11b54042-d5f6-43e4-92c7-560bc7f55f33\") " pod="openstack/dnsmasq-dns-64dff96bf7-6fjfz" Oct 01 15:20:21 crc kubenswrapper[4869]: I1001 15:20:21.660401 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/af712290-aa09-4efa-80ac-6f655752332e-config-data-custom\") pod \"barbican-keystone-listener-6d6689c8d-7r6k4\" (UID: \"af712290-aa09-4efa-80ac-6f655752332e\") " pod="openstack/barbican-keystone-listener-6d6689c8d-7r6k4" Oct 01 15:20:21 crc kubenswrapper[4869]: I1001 15:20:21.660423 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dm8k6\" (UniqueName: \"kubernetes.io/projected/11b54042-d5f6-43e4-92c7-560bc7f55f33-kube-api-access-dm8k6\") pod \"dnsmasq-dns-64dff96bf7-6fjfz\" (UID: \"11b54042-d5f6-43e4-92c7-560bc7f55f33\") " pod="openstack/dnsmasq-dns-64dff96bf7-6fjfz" Oct 01 15:20:21 crc kubenswrapper[4869]: I1001 15:20:21.660446 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af712290-aa09-4efa-80ac-6f655752332e-combined-ca-bundle\") pod \"barbican-keystone-listener-6d6689c8d-7r6k4\" (UID: \"af712290-aa09-4efa-80ac-6f655752332e\") " pod="openstack/barbican-keystone-listener-6d6689c8d-7r6k4" Oct 01 15:20:21 crc kubenswrapper[4869]: I1001 15:20:21.660498 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/af712290-aa09-4efa-80ac-6f655752332e-config-data\") pod \"barbican-keystone-listener-6d6689c8d-7r6k4\" (UID: \"af712290-aa09-4efa-80ac-6f655752332e\") " pod="openstack/barbican-keystone-listener-6d6689c8d-7r6k4" Oct 01 15:20:21 crc kubenswrapper[4869]: I1001 15:20:21.660516 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xrv2k\" (UniqueName: \"kubernetes.io/projected/af712290-aa09-4efa-80ac-6f655752332e-kube-api-access-xrv2k\") pod \"barbican-keystone-listener-6d6689c8d-7r6k4\" (UID: \"af712290-aa09-4efa-80ac-6f655752332e\") " pod="openstack/barbican-keystone-listener-6d6689c8d-7r6k4" Oct 01 15:20:21 crc kubenswrapper[4869]: I1001 15:20:21.660561 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/11b54042-d5f6-43e4-92c7-560bc7f55f33-ovsdbserver-sb\") pod \"dnsmasq-dns-64dff96bf7-6fjfz\" (UID: \"11b54042-d5f6-43e4-92c7-560bc7f55f33\") " pod="openstack/dnsmasq-dns-64dff96bf7-6fjfz" Oct 01 15:20:21 crc kubenswrapper[4869]: I1001 15:20:21.660601 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/11b54042-d5f6-43e4-92c7-560bc7f55f33-ovsdbserver-nb\") pod \"dnsmasq-dns-64dff96bf7-6fjfz\" (UID: \"11b54042-d5f6-43e4-92c7-560bc7f55f33\") " pod="openstack/dnsmasq-dns-64dff96bf7-6fjfz" Oct 01 15:20:21 crc kubenswrapper[4869]: I1001 15:20:21.660632 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/af712290-aa09-4efa-80ac-6f655752332e-logs\") pod \"barbican-keystone-listener-6d6689c8d-7r6k4\" (UID: \"af712290-aa09-4efa-80ac-6f655752332e\") " pod="openstack/barbican-keystone-listener-6d6689c8d-7r6k4" Oct 01 15:20:21 crc kubenswrapper[4869]: I1001 15:20:21.660952 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/af712290-aa09-4efa-80ac-6f655752332e-logs\") pod \"barbican-keystone-listener-6d6689c8d-7r6k4\" (UID: \"af712290-aa09-4efa-80ac-6f655752332e\") " pod="openstack/barbican-keystone-listener-6d6689c8d-7r6k4" Oct 01 15:20:21 crc kubenswrapper[4869]: I1001 15:20:21.669451 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/af712290-aa09-4efa-80ac-6f655752332e-config-data-custom\") pod \"barbican-keystone-listener-6d6689c8d-7r6k4\" (UID: \"af712290-aa09-4efa-80ac-6f655752332e\") " pod="openstack/barbican-keystone-listener-6d6689c8d-7r6k4" Oct 01 15:20:21 crc kubenswrapper[4869]: I1001 15:20:21.683051 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/af712290-aa09-4efa-80ac-6f655752332e-config-data\") pod \"barbican-keystone-listener-6d6689c8d-7r6k4\" (UID: \"af712290-aa09-4efa-80ac-6f655752332e\") " pod="openstack/barbican-keystone-listener-6d6689c8d-7r6k4" Oct 01 15:20:21 crc kubenswrapper[4869]: I1001 15:20:21.692541 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af712290-aa09-4efa-80ac-6f655752332e-combined-ca-bundle\") pod \"barbican-keystone-listener-6d6689c8d-7r6k4\" (UID: \"af712290-aa09-4efa-80ac-6f655752332e\") " pod="openstack/barbican-keystone-listener-6d6689c8d-7r6k4" Oct 01 15:20:21 crc kubenswrapper[4869]: I1001 15:20:21.692768 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xrv2k\" (UniqueName: \"kubernetes.io/projected/af712290-aa09-4efa-80ac-6f655752332e-kube-api-access-xrv2k\") pod \"barbican-keystone-listener-6d6689c8d-7r6k4\" (UID: \"af712290-aa09-4efa-80ac-6f655752332e\") " pod="openstack/barbican-keystone-listener-6d6689c8d-7r6k4" Oct 01 15:20:21 crc kubenswrapper[4869]: I1001 15:20:21.692874 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-7b96966b79-mtqf2" Oct 01 15:20:21 crc kubenswrapper[4869]: I1001 15:20:21.745760 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-6647d584b4-vsdfk"] Oct 01 15:20:21 crc kubenswrapper[4869]: I1001 15:20:21.747117 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-6647d584b4-vsdfk" Oct 01 15:20:21 crc kubenswrapper[4869]: I1001 15:20:21.751792 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data" Oct 01 15:20:21 crc kubenswrapper[4869]: I1001 15:20:21.767742 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dm8k6\" (UniqueName: \"kubernetes.io/projected/11b54042-d5f6-43e4-92c7-560bc7f55f33-kube-api-access-dm8k6\") pod \"dnsmasq-dns-64dff96bf7-6fjfz\" (UID: \"11b54042-d5f6-43e4-92c7-560bc7f55f33\") " pod="openstack/dnsmasq-dns-64dff96bf7-6fjfz" Oct 01 15:20:21 crc kubenswrapper[4869]: I1001 15:20:21.768117 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/11b54042-d5f6-43e4-92c7-560bc7f55f33-ovsdbserver-sb\") pod \"dnsmasq-dns-64dff96bf7-6fjfz\" (UID: \"11b54042-d5f6-43e4-92c7-560bc7f55f33\") " pod="openstack/dnsmasq-dns-64dff96bf7-6fjfz" Oct 01 15:20:21 crc kubenswrapper[4869]: I1001 15:20:21.768166 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/11b54042-d5f6-43e4-92c7-560bc7f55f33-ovsdbserver-nb\") pod \"dnsmasq-dns-64dff96bf7-6fjfz\" (UID: \"11b54042-d5f6-43e4-92c7-560bc7f55f33\") " pod="openstack/dnsmasq-dns-64dff96bf7-6fjfz" Oct 01 15:20:21 crc kubenswrapper[4869]: I1001 15:20:21.768232 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/11b54042-d5f6-43e4-92c7-560bc7f55f33-config\") pod \"dnsmasq-dns-64dff96bf7-6fjfz\" (UID: \"11b54042-d5f6-43e4-92c7-560bc7f55f33\") " pod="openstack/dnsmasq-dns-64dff96bf7-6fjfz" Oct 01 15:20:21 crc kubenswrapper[4869]: I1001 15:20:21.768345 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/11b54042-d5f6-43e4-92c7-560bc7f55f33-dns-svc\") pod \"dnsmasq-dns-64dff96bf7-6fjfz\" (UID: \"11b54042-d5f6-43e4-92c7-560bc7f55f33\") " pod="openstack/dnsmasq-dns-64dff96bf7-6fjfz" Oct 01 15:20:21 crc kubenswrapper[4869]: I1001 15:20:21.769147 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/11b54042-d5f6-43e4-92c7-560bc7f55f33-dns-svc\") pod \"dnsmasq-dns-64dff96bf7-6fjfz\" (UID: \"11b54042-d5f6-43e4-92c7-560bc7f55f33\") " pod="openstack/dnsmasq-dns-64dff96bf7-6fjfz" Oct 01 15:20:21 crc kubenswrapper[4869]: I1001 15:20:21.771492 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/11b54042-d5f6-43e4-92c7-560bc7f55f33-config\") pod \"dnsmasq-dns-64dff96bf7-6fjfz\" (UID: \"11b54042-d5f6-43e4-92c7-560bc7f55f33\") " pod="openstack/dnsmasq-dns-64dff96bf7-6fjfz" Oct 01 15:20:21 crc kubenswrapper[4869]: I1001 15:20:21.772203 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/11b54042-d5f6-43e4-92c7-560bc7f55f33-ovsdbserver-nb\") pod \"dnsmasq-dns-64dff96bf7-6fjfz\" (UID: \"11b54042-d5f6-43e4-92c7-560bc7f55f33\") " pod="openstack/dnsmasq-dns-64dff96bf7-6fjfz" Oct 01 15:20:21 crc kubenswrapper[4869]: I1001 15:20:21.774638 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-6647d584b4-vsdfk"] Oct 01 15:20:21 crc kubenswrapper[4869]: I1001 15:20:21.779636 4869 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/11b54042-d5f6-43e4-92c7-560bc7f55f33-ovsdbserver-sb\") pod \"dnsmasq-dns-64dff96bf7-6fjfz\" (UID: \"11b54042-d5f6-43e4-92c7-560bc7f55f33\") " pod="openstack/dnsmasq-dns-64dff96bf7-6fjfz" Oct 01 15:20:21 crc kubenswrapper[4869]: I1001 15:20:21.797608 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dm8k6\" (UniqueName: \"kubernetes.io/projected/11b54042-d5f6-43e4-92c7-560bc7f55f33-kube-api-access-dm8k6\") pod \"dnsmasq-dns-64dff96bf7-6fjfz\" (UID: \"11b54042-d5f6-43e4-92c7-560bc7f55f33\") " pod="openstack/dnsmasq-dns-64dff96bf7-6fjfz" Oct 01 15:20:21 crc kubenswrapper[4869]: I1001 15:20:21.828114 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-6d6689c8d-7r6k4" Oct 01 15:20:21 crc kubenswrapper[4869]: I1001 15:20:21.871557 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a59f97ca-2d70-422f-ae93-81ec595b43aa-config-data-custom\") pod \"barbican-api-6647d584b4-vsdfk\" (UID: \"a59f97ca-2d70-422f-ae93-81ec595b43aa\") " pod="openstack/barbican-api-6647d584b4-vsdfk" Oct 01 15:20:21 crc kubenswrapper[4869]: I1001 15:20:21.871861 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a59f97ca-2d70-422f-ae93-81ec595b43aa-config-data\") pod \"barbican-api-6647d584b4-vsdfk\" (UID: \"a59f97ca-2d70-422f-ae93-81ec595b43aa\") " pod="openstack/barbican-api-6647d584b4-vsdfk" Oct 01 15:20:21 crc kubenswrapper[4869]: I1001 15:20:21.871971 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a59f97ca-2d70-422f-ae93-81ec595b43aa-logs\") pod \"barbican-api-6647d584b4-vsdfk\" (UID: \"a59f97ca-2d70-422f-ae93-81ec595b43aa\") " pod="openstack/barbican-api-6647d584b4-vsdfk" Oct 01 15:20:21 crc kubenswrapper[4869]: I1001 15:20:21.872012 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-46hj2\" (UniqueName: \"kubernetes.io/projected/a59f97ca-2d70-422f-ae93-81ec595b43aa-kube-api-access-46hj2\") pod \"barbican-api-6647d584b4-vsdfk\" (UID: \"a59f97ca-2d70-422f-ae93-81ec595b43aa\") " pod="openstack/barbican-api-6647d584b4-vsdfk" Oct 01 15:20:21 crc kubenswrapper[4869]: I1001 15:20:21.872091 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a59f97ca-2d70-422f-ae93-81ec595b43aa-combined-ca-bundle\") pod \"barbican-api-6647d584b4-vsdfk\" (UID: \"a59f97ca-2d70-422f-ae93-81ec595b43aa\") " pod="openstack/barbican-api-6647d584b4-vsdfk" Oct 01 15:20:21 crc kubenswrapper[4869]: I1001 15:20:21.973578 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a59f97ca-2d70-422f-ae93-81ec595b43aa-combined-ca-bundle\") pod \"barbican-api-6647d584b4-vsdfk\" (UID: \"a59f97ca-2d70-422f-ae93-81ec595b43aa\") " pod="openstack/barbican-api-6647d584b4-vsdfk" Oct 01 15:20:21 crc kubenswrapper[4869]: I1001 15:20:21.973671 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: 
\"kubernetes.io/secret/a59f97ca-2d70-422f-ae93-81ec595b43aa-config-data-custom\") pod \"barbican-api-6647d584b4-vsdfk\" (UID: \"a59f97ca-2d70-422f-ae93-81ec595b43aa\") " pod="openstack/barbican-api-6647d584b4-vsdfk" Oct 01 15:20:21 crc kubenswrapper[4869]: I1001 15:20:21.973710 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a59f97ca-2d70-422f-ae93-81ec595b43aa-config-data\") pod \"barbican-api-6647d584b4-vsdfk\" (UID: \"a59f97ca-2d70-422f-ae93-81ec595b43aa\") " pod="openstack/barbican-api-6647d584b4-vsdfk" Oct 01 15:20:21 crc kubenswrapper[4869]: I1001 15:20:21.973760 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a59f97ca-2d70-422f-ae93-81ec595b43aa-logs\") pod \"barbican-api-6647d584b4-vsdfk\" (UID: \"a59f97ca-2d70-422f-ae93-81ec595b43aa\") " pod="openstack/barbican-api-6647d584b4-vsdfk" Oct 01 15:20:21 crc kubenswrapper[4869]: I1001 15:20:21.973785 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-46hj2\" (UniqueName: \"kubernetes.io/projected/a59f97ca-2d70-422f-ae93-81ec595b43aa-kube-api-access-46hj2\") pod \"barbican-api-6647d584b4-vsdfk\" (UID: \"a59f97ca-2d70-422f-ae93-81ec595b43aa\") " pod="openstack/barbican-api-6647d584b4-vsdfk" Oct 01 15:20:21 crc kubenswrapper[4869]: I1001 15:20:21.976370 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a59f97ca-2d70-422f-ae93-81ec595b43aa-logs\") pod \"barbican-api-6647d584b4-vsdfk\" (UID: \"a59f97ca-2d70-422f-ae93-81ec595b43aa\") " pod="openstack/barbican-api-6647d584b4-vsdfk" Oct 01 15:20:21 crc kubenswrapper[4869]: I1001 15:20:21.978817 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a59f97ca-2d70-422f-ae93-81ec595b43aa-config-data\") pod \"barbican-api-6647d584b4-vsdfk\" (UID: \"a59f97ca-2d70-422f-ae93-81ec595b43aa\") " pod="openstack/barbican-api-6647d584b4-vsdfk" Oct 01 15:20:21 crc kubenswrapper[4869]: I1001 15:20:21.980187 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a59f97ca-2d70-422f-ae93-81ec595b43aa-combined-ca-bundle\") pod \"barbican-api-6647d584b4-vsdfk\" (UID: \"a59f97ca-2d70-422f-ae93-81ec595b43aa\") " pod="openstack/barbican-api-6647d584b4-vsdfk" Oct 01 15:20:21 crc kubenswrapper[4869]: I1001 15:20:21.983357 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a59f97ca-2d70-422f-ae93-81ec595b43aa-config-data-custom\") pod \"barbican-api-6647d584b4-vsdfk\" (UID: \"a59f97ca-2d70-422f-ae93-81ec595b43aa\") " pod="openstack/barbican-api-6647d584b4-vsdfk" Oct 01 15:20:21 crc kubenswrapper[4869]: I1001 15:20:21.996143 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-46hj2\" (UniqueName: \"kubernetes.io/projected/a59f97ca-2d70-422f-ae93-81ec595b43aa-kube-api-access-46hj2\") pod \"barbican-api-6647d584b4-vsdfk\" (UID: \"a59f97ca-2d70-422f-ae93-81ec595b43aa\") " pod="openstack/barbican-api-6647d584b4-vsdfk" Oct 01 15:20:22 crc kubenswrapper[4869]: I1001 15:20:22.044603 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-64dff96bf7-6fjfz" Oct 01 15:20:22 crc kubenswrapper[4869]: I1001 15:20:22.077790 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-6647d584b4-vsdfk" Oct 01 15:20:22 crc kubenswrapper[4869]: I1001 15:20:22.211858 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-7b96966b79-mtqf2"] Oct 01 15:20:22 crc kubenswrapper[4869]: W1001 15:20:22.222499 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb0cf993c_1e4c_425b_8266_e087119e45b2.slice/crio-7962be025760ebf028dcd4d04d9f0dc6017d7dff6cfb42781b1d559188bc6af4 WatchSource:0}: Error finding container 7962be025760ebf028dcd4d04d9f0dc6017d7dff6cfb42781b1d559188bc6af4: Status 404 returned error can't find the container with id 7962be025760ebf028dcd4d04d9f0dc6017d7dff6cfb42781b1d559188bc6af4 Oct 01 15:20:22 crc kubenswrapper[4869]: I1001 15:20:22.319531 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-6d6689c8d-7r6k4"] Oct 01 15:20:22 crc kubenswrapper[4869]: I1001 15:20:22.640002 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-64dff96bf7-6fjfz"] Oct 01 15:20:22 crc kubenswrapper[4869]: W1001 15:20:22.643145 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod11b54042_d5f6_43e4_92c7_560bc7f55f33.slice/crio-9a264b0d244a16c8b3d5bd5813baeaac86de5874b9c8df55b50d38dec371b441 WatchSource:0}: Error finding container 9a264b0d244a16c8b3d5bd5813baeaac86de5874b9c8df55b50d38dec371b441: Status 404 returned error can't find the container with id 9a264b0d244a16c8b3d5bd5813baeaac86de5874b9c8df55b50d38dec371b441 Oct 01 15:20:22 crc kubenswrapper[4869]: I1001 15:20:22.725403 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-6647d584b4-vsdfk"] Oct 01 15:20:23 crc kubenswrapper[4869]: I1001 15:20:23.002803 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-6d6689c8d-7r6k4" event={"ID":"af712290-aa09-4efa-80ac-6f655752332e","Type":"ContainerStarted","Data":"33e04e66814b8917ea59d81c87981101f4a57534d202c678a4ec6d8524375060"} Oct 01 15:20:23 crc kubenswrapper[4869]: I1001 15:20:23.029467 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-slhz2" event={"ID":"f9c5d763-90ad-4611-8cac-193343af1b78","Type":"ContainerStarted","Data":"a0388f97bcf8bcce90875c04ef6844d95dab9ed796f85800020fc72658f7804b"} Oct 01 15:20:23 crc kubenswrapper[4869]: I1001 15:20:23.041852 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6647d584b4-vsdfk" event={"ID":"a59f97ca-2d70-422f-ae93-81ec595b43aa","Type":"ContainerStarted","Data":"fd763179891c7ac07764b590c98ced89ccaf481990b12d5588e073446dcf0a6b"} Oct 01 15:20:23 crc kubenswrapper[4869]: I1001 15:20:23.041917 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6647d584b4-vsdfk" event={"ID":"a59f97ca-2d70-422f-ae93-81ec595b43aa","Type":"ContainerStarted","Data":"6573c00e28b352e685cd57276693cd1f5aff349f12947c3cc00136fb400ba3ff"} Oct 01 15:20:23 crc kubenswrapper[4869]: I1001 15:20:23.051524 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-64dff96bf7-6fjfz" 
event={"ID":"11b54042-d5f6-43e4-92c7-560bc7f55f33","Type":"ContainerStarted","Data":"b052c9b9c466e296ca9425673c7545b5dc3b0ff495308382b40e23a030ff5492"} Oct 01 15:20:23 crc kubenswrapper[4869]: I1001 15:20:23.051565 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-64dff96bf7-6fjfz" event={"ID":"11b54042-d5f6-43e4-92c7-560bc7f55f33","Type":"ContainerStarted","Data":"9a264b0d244a16c8b3d5bd5813baeaac86de5874b9c8df55b50d38dec371b441"} Oct 01 15:20:23 crc kubenswrapper[4869]: I1001 15:20:23.057334 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-slhz2" podStartSLOduration=3.9482877629999997 podStartE2EDuration="47.057317303s" podCreationTimestamp="2025-10-01 15:19:36 +0000 UTC" firstStartedPulling="2025-10-01 15:19:38.089942137 +0000 UTC m=+887.236785253" lastFinishedPulling="2025-10-01 15:20:21.198971667 +0000 UTC m=+930.345814793" observedRunningTime="2025-10-01 15:20:23.050567213 +0000 UTC m=+932.197410339" watchObservedRunningTime="2025-10-01 15:20:23.057317303 +0000 UTC m=+932.204160419" Oct 01 15:20:23 crc kubenswrapper[4869]: I1001 15:20:23.058157 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-7b96966b79-mtqf2" event={"ID":"b0cf993c-1e4c-425b-8266-e087119e45b2","Type":"ContainerStarted","Data":"7962be025760ebf028dcd4d04d9f0dc6017d7dff6cfb42781b1d559188bc6af4"} Oct 01 15:20:24 crc kubenswrapper[4869]: I1001 15:20:24.044394 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-57c668579d-grs6h"] Oct 01 15:20:24 crc kubenswrapper[4869]: I1001 15:20:24.046119 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-57c668579d-grs6h" Oct 01 15:20:24 crc kubenswrapper[4869]: I1001 15:20:24.048909 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-public-svc" Oct 01 15:20:24 crc kubenswrapper[4869]: I1001 15:20:24.050512 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-internal-svc" Oct 01 15:20:24 crc kubenswrapper[4869]: I1001 15:20:24.058881 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-57c668579d-grs6h"] Oct 01 15:20:24 crc kubenswrapper[4869]: I1001 15:20:24.087168 4869 generic.go:334] "Generic (PLEG): container finished" podID="11b54042-d5f6-43e4-92c7-560bc7f55f33" containerID="b052c9b9c466e296ca9425673c7545b5dc3b0ff495308382b40e23a030ff5492" exitCode=0 Oct 01 15:20:24 crc kubenswrapper[4869]: I1001 15:20:24.087259 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-64dff96bf7-6fjfz" event={"ID":"11b54042-d5f6-43e4-92c7-560bc7f55f33","Type":"ContainerDied","Data":"b052c9b9c466e296ca9425673c7545b5dc3b0ff495308382b40e23a030ff5492"} Oct 01 15:20:24 crc kubenswrapper[4869]: I1001 15:20:24.087285 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-64dff96bf7-6fjfz" event={"ID":"11b54042-d5f6-43e4-92c7-560bc7f55f33","Type":"ContainerStarted","Data":"b08d81afcf91560c9ccf6b61bf61d95bdc867e3dea0228e68a281990f5c2ed83"} Oct 01 15:20:24 crc kubenswrapper[4869]: I1001 15:20:24.088347 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-64dff96bf7-6fjfz" Oct 01 15:20:24 crc kubenswrapper[4869]: I1001 15:20:24.095115 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6647d584b4-vsdfk" 
event={"ID":"a59f97ca-2d70-422f-ae93-81ec595b43aa","Type":"ContainerStarted","Data":"443ba4fc8389996332bbb96dcf8220fe8bbf57edce9d0d1214f191d9aa34cf6e"} Oct 01 15:20:24 crc kubenswrapper[4869]: I1001 15:20:24.095620 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-6647d584b4-vsdfk" Oct 01 15:20:24 crc kubenswrapper[4869]: I1001 15:20:24.095648 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-6647d584b4-vsdfk" Oct 01 15:20:24 crc kubenswrapper[4869]: I1001 15:20:24.111800 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-64dff96bf7-6fjfz" podStartSLOduration=3.111783015 podStartE2EDuration="3.111783015s" podCreationTimestamp="2025-10-01 15:20:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:20:24.104348847 +0000 UTC m=+933.251191973" watchObservedRunningTime="2025-10-01 15:20:24.111783015 +0000 UTC m=+933.258626131" Oct 01 15:20:24 crc kubenswrapper[4869]: I1001 15:20:24.127149 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bbb60a0d-b89d-4ab8-aa53-fb753317799f-combined-ca-bundle\") pod \"barbican-api-57c668579d-grs6h\" (UID: \"bbb60a0d-b89d-4ab8-aa53-fb753317799f\") " pod="openstack/barbican-api-57c668579d-grs6h" Oct 01 15:20:24 crc kubenswrapper[4869]: I1001 15:20:24.127214 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bbb60a0d-b89d-4ab8-aa53-fb753317799f-config-data-custom\") pod \"barbican-api-57c668579d-grs6h\" (UID: \"bbb60a0d-b89d-4ab8-aa53-fb753317799f\") " pod="openstack/barbican-api-57c668579d-grs6h" Oct 01 15:20:24 crc kubenswrapper[4869]: I1001 15:20:24.127769 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/bbb60a0d-b89d-4ab8-aa53-fb753317799f-public-tls-certs\") pod \"barbican-api-57c668579d-grs6h\" (UID: \"bbb60a0d-b89d-4ab8-aa53-fb753317799f\") " pod="openstack/barbican-api-57c668579d-grs6h" Oct 01 15:20:24 crc kubenswrapper[4869]: I1001 15:20:24.127813 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4zc2h\" (UniqueName: \"kubernetes.io/projected/bbb60a0d-b89d-4ab8-aa53-fb753317799f-kube-api-access-4zc2h\") pod \"barbican-api-57c668579d-grs6h\" (UID: \"bbb60a0d-b89d-4ab8-aa53-fb753317799f\") " pod="openstack/barbican-api-57c668579d-grs6h" Oct 01 15:20:24 crc kubenswrapper[4869]: I1001 15:20:24.127872 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bbb60a0d-b89d-4ab8-aa53-fb753317799f-logs\") pod \"barbican-api-57c668579d-grs6h\" (UID: \"bbb60a0d-b89d-4ab8-aa53-fb753317799f\") " pod="openstack/barbican-api-57c668579d-grs6h" Oct 01 15:20:24 crc kubenswrapper[4869]: I1001 15:20:24.127897 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bbb60a0d-b89d-4ab8-aa53-fb753317799f-config-data\") pod \"barbican-api-57c668579d-grs6h\" (UID: \"bbb60a0d-b89d-4ab8-aa53-fb753317799f\") " pod="openstack/barbican-api-57c668579d-grs6h" Oct 01 15:20:24 crc 
kubenswrapper[4869]: I1001 15:20:24.127928 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/bbb60a0d-b89d-4ab8-aa53-fb753317799f-internal-tls-certs\") pod \"barbican-api-57c668579d-grs6h\" (UID: \"bbb60a0d-b89d-4ab8-aa53-fb753317799f\") " pod="openstack/barbican-api-57c668579d-grs6h" Oct 01 15:20:24 crc kubenswrapper[4869]: I1001 15:20:24.130668 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-6647d584b4-vsdfk" podStartSLOduration=3.130658522 podStartE2EDuration="3.130658522s" podCreationTimestamp="2025-10-01 15:20:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:20:24.124337562 +0000 UTC m=+933.271180678" watchObservedRunningTime="2025-10-01 15:20:24.130658522 +0000 UTC m=+933.277501638" Oct 01 15:20:24 crc kubenswrapper[4869]: I1001 15:20:24.229643 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bbb60a0d-b89d-4ab8-aa53-fb753317799f-config-data\") pod \"barbican-api-57c668579d-grs6h\" (UID: \"bbb60a0d-b89d-4ab8-aa53-fb753317799f\") " pod="openstack/barbican-api-57c668579d-grs6h" Oct 01 15:20:24 crc kubenswrapper[4869]: I1001 15:20:24.229705 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/bbb60a0d-b89d-4ab8-aa53-fb753317799f-internal-tls-certs\") pod \"barbican-api-57c668579d-grs6h\" (UID: \"bbb60a0d-b89d-4ab8-aa53-fb753317799f\") " pod="openstack/barbican-api-57c668579d-grs6h" Oct 01 15:20:24 crc kubenswrapper[4869]: I1001 15:20:24.230273 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bbb60a0d-b89d-4ab8-aa53-fb753317799f-combined-ca-bundle\") pod \"barbican-api-57c668579d-grs6h\" (UID: \"bbb60a0d-b89d-4ab8-aa53-fb753317799f\") " pod="openstack/barbican-api-57c668579d-grs6h" Oct 01 15:20:24 crc kubenswrapper[4869]: I1001 15:20:24.230332 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bbb60a0d-b89d-4ab8-aa53-fb753317799f-config-data-custom\") pod \"barbican-api-57c668579d-grs6h\" (UID: \"bbb60a0d-b89d-4ab8-aa53-fb753317799f\") " pod="openstack/barbican-api-57c668579d-grs6h" Oct 01 15:20:24 crc kubenswrapper[4869]: I1001 15:20:24.230762 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/bbb60a0d-b89d-4ab8-aa53-fb753317799f-public-tls-certs\") pod \"barbican-api-57c668579d-grs6h\" (UID: \"bbb60a0d-b89d-4ab8-aa53-fb753317799f\") " pod="openstack/barbican-api-57c668579d-grs6h" Oct 01 15:20:24 crc kubenswrapper[4869]: I1001 15:20:24.230812 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4zc2h\" (UniqueName: \"kubernetes.io/projected/bbb60a0d-b89d-4ab8-aa53-fb753317799f-kube-api-access-4zc2h\") pod \"barbican-api-57c668579d-grs6h\" (UID: \"bbb60a0d-b89d-4ab8-aa53-fb753317799f\") " pod="openstack/barbican-api-57c668579d-grs6h" Oct 01 15:20:24 crc kubenswrapper[4869]: I1001 15:20:24.230889 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/bbb60a0d-b89d-4ab8-aa53-fb753317799f-logs\") pod \"barbican-api-57c668579d-grs6h\" (UID: \"bbb60a0d-b89d-4ab8-aa53-fb753317799f\") " pod="openstack/barbican-api-57c668579d-grs6h" Oct 01 15:20:24 crc kubenswrapper[4869]: I1001 15:20:24.231226 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bbb60a0d-b89d-4ab8-aa53-fb753317799f-logs\") pod \"barbican-api-57c668579d-grs6h\" (UID: \"bbb60a0d-b89d-4ab8-aa53-fb753317799f\") " pod="openstack/barbican-api-57c668579d-grs6h" Oct 01 15:20:24 crc kubenswrapper[4869]: I1001 15:20:24.234984 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bbb60a0d-b89d-4ab8-aa53-fb753317799f-combined-ca-bundle\") pod \"barbican-api-57c668579d-grs6h\" (UID: \"bbb60a0d-b89d-4ab8-aa53-fb753317799f\") " pod="openstack/barbican-api-57c668579d-grs6h" Oct 01 15:20:24 crc kubenswrapper[4869]: I1001 15:20:24.236356 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bbb60a0d-b89d-4ab8-aa53-fb753317799f-config-data\") pod \"barbican-api-57c668579d-grs6h\" (UID: \"bbb60a0d-b89d-4ab8-aa53-fb753317799f\") " pod="openstack/barbican-api-57c668579d-grs6h" Oct 01 15:20:24 crc kubenswrapper[4869]: I1001 15:20:24.239842 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/bbb60a0d-b89d-4ab8-aa53-fb753317799f-public-tls-certs\") pod \"barbican-api-57c668579d-grs6h\" (UID: \"bbb60a0d-b89d-4ab8-aa53-fb753317799f\") " pod="openstack/barbican-api-57c668579d-grs6h" Oct 01 15:20:24 crc kubenswrapper[4869]: I1001 15:20:24.243735 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bbb60a0d-b89d-4ab8-aa53-fb753317799f-config-data-custom\") pod \"barbican-api-57c668579d-grs6h\" (UID: \"bbb60a0d-b89d-4ab8-aa53-fb753317799f\") " pod="openstack/barbican-api-57c668579d-grs6h" Oct 01 15:20:24 crc kubenswrapper[4869]: I1001 15:20:24.251595 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/bbb60a0d-b89d-4ab8-aa53-fb753317799f-internal-tls-certs\") pod \"barbican-api-57c668579d-grs6h\" (UID: \"bbb60a0d-b89d-4ab8-aa53-fb753317799f\") " pod="openstack/barbican-api-57c668579d-grs6h" Oct 01 15:20:24 crc kubenswrapper[4869]: I1001 15:20:24.252426 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4zc2h\" (UniqueName: \"kubernetes.io/projected/bbb60a0d-b89d-4ab8-aa53-fb753317799f-kube-api-access-4zc2h\") pod \"barbican-api-57c668579d-grs6h\" (UID: \"bbb60a0d-b89d-4ab8-aa53-fb753317799f\") " pod="openstack/barbican-api-57c668579d-grs6h" Oct 01 15:20:24 crc kubenswrapper[4869]: I1001 15:20:24.369789 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-57c668579d-grs6h" Oct 01 15:20:24 crc kubenswrapper[4869]: I1001 15:20:24.860804 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-57c668579d-grs6h"] Oct 01 15:20:24 crc kubenswrapper[4869]: W1001 15:20:24.866423 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbbb60a0d_b89d_4ab8_aa53_fb753317799f.slice/crio-90cefee3041b63e35ae93adfa2a90d6fa596e25fdb5804511dd219e9231fe911 WatchSource:0}: Error finding container 90cefee3041b63e35ae93adfa2a90d6fa596e25fdb5804511dd219e9231fe911: Status 404 returned error can't find the container with id 90cefee3041b63e35ae93adfa2a90d6fa596e25fdb5804511dd219e9231fe911 Oct 01 15:20:25 crc kubenswrapper[4869]: I1001 15:20:25.106583 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-57c668579d-grs6h" event={"ID":"bbb60a0d-b89d-4ab8-aa53-fb753317799f","Type":"ContainerStarted","Data":"1c17f9886ba144c7018109816acd161763920cbed7a50f25b9b6b14eb774f260"} Oct 01 15:20:25 crc kubenswrapper[4869]: I1001 15:20:25.106625 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-57c668579d-grs6h" event={"ID":"bbb60a0d-b89d-4ab8-aa53-fb753317799f","Type":"ContainerStarted","Data":"90cefee3041b63e35ae93adfa2a90d6fa596e25fdb5804511dd219e9231fe911"} Oct 01 15:20:25 crc kubenswrapper[4869]: I1001 15:20:25.115176 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-7b96966b79-mtqf2" event={"ID":"b0cf993c-1e4c-425b-8266-e087119e45b2","Type":"ContainerStarted","Data":"9745a168766d2919365f83326702435e6db66a72e627cc66ea90e52958e5e3e2"} Oct 01 15:20:25 crc kubenswrapper[4869]: I1001 15:20:25.115215 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-7b96966b79-mtqf2" event={"ID":"b0cf993c-1e4c-425b-8266-e087119e45b2","Type":"ContainerStarted","Data":"404b07594b49c8e35da542dbb7e4aa0758730dc504faf4a8ca7b462f7a41eba4"} Oct 01 15:20:25 crc kubenswrapper[4869]: I1001 15:20:25.123255 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-6d6689c8d-7r6k4" event={"ID":"af712290-aa09-4efa-80ac-6f655752332e","Type":"ContainerStarted","Data":"789c2a738e34f718c5a55d3f9f7530c72ff0c930da9eacbd59b5b4f1517d0c3e"} Oct 01 15:20:25 crc kubenswrapper[4869]: I1001 15:20:25.123291 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-6d6689c8d-7r6k4" event={"ID":"af712290-aa09-4efa-80ac-6f655752332e","Type":"ContainerStarted","Data":"e70e7ce2d3a46910cf72613e523e1e0be030a1f644e5b18fe9a2e73849f421c7"} Oct 01 15:20:25 crc kubenswrapper[4869]: I1001 15:20:25.147196 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-7b96966b79-mtqf2" podStartSLOduration=1.9629463390000002 podStartE2EDuration="4.147174595s" podCreationTimestamp="2025-10-01 15:20:21 +0000 UTC" firstStartedPulling="2025-10-01 15:20:22.235401074 +0000 UTC m=+931.382244190" lastFinishedPulling="2025-10-01 15:20:24.41962933 +0000 UTC m=+933.566472446" observedRunningTime="2025-10-01 15:20:25.132398972 +0000 UTC m=+934.279242118" watchObservedRunningTime="2025-10-01 15:20:25.147174595 +0000 UTC m=+934.294017711" Oct 01 15:20:25 crc kubenswrapper[4869]: I1001 15:20:25.149160 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-6d6689c8d-7r6k4" 
podStartSLOduration=2.072782233 podStartE2EDuration="4.149151045s" podCreationTimestamp="2025-10-01 15:20:21 +0000 UTC" firstStartedPulling="2025-10-01 15:20:22.343460653 +0000 UTC m=+931.490303769" lastFinishedPulling="2025-10-01 15:20:24.419829465 +0000 UTC m=+933.566672581" observedRunningTime="2025-10-01 15:20:25.147713349 +0000 UTC m=+934.294556465" watchObservedRunningTime="2025-10-01 15:20:25.149151045 +0000 UTC m=+934.295994181" Oct 01 15:20:26 crc kubenswrapper[4869]: I1001 15:20:26.132531 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-57c668579d-grs6h" event={"ID":"bbb60a0d-b89d-4ab8-aa53-fb753317799f","Type":"ContainerStarted","Data":"7a9a5b920976af68f276ee67133272bed46e166e91f347a76710d97040510400"} Oct 01 15:20:26 crc kubenswrapper[4869]: I1001 15:20:26.157155 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-57c668579d-grs6h" podStartSLOduration=2.156769125 podStartE2EDuration="2.156769125s" podCreationTimestamp="2025-10-01 15:20:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:20:26.15378924 +0000 UTC m=+935.300632396" watchObservedRunningTime="2025-10-01 15:20:26.156769125 +0000 UTC m=+935.303612251" Oct 01 15:20:27 crc kubenswrapper[4869]: I1001 15:20:27.141590 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-57c668579d-grs6h" Oct 01 15:20:27 crc kubenswrapper[4869]: I1001 15:20:27.141937 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-57c668579d-grs6h" Oct 01 15:20:28 crc kubenswrapper[4869]: I1001 15:20:28.749032 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-6647d584b4-vsdfk" Oct 01 15:20:29 crc kubenswrapper[4869]: I1001 15:20:29.159803 4869 generic.go:334] "Generic (PLEG): container finished" podID="f9c5d763-90ad-4611-8cac-193343af1b78" containerID="a0388f97bcf8bcce90875c04ef6844d95dab9ed796f85800020fc72658f7804b" exitCode=0 Oct 01 15:20:29 crc kubenswrapper[4869]: I1001 15:20:29.159874 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-slhz2" event={"ID":"f9c5d763-90ad-4611-8cac-193343af1b78","Type":"ContainerDied","Data":"a0388f97bcf8bcce90875c04ef6844d95dab9ed796f85800020fc72658f7804b"} Oct 01 15:20:29 crc kubenswrapper[4869]: I1001 15:20:29.212064 4869 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-5766b74c9d-wpxpf" podUID="011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.140:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.140:8443: connect: connection refused" Oct 01 15:20:29 crc kubenswrapper[4869]: I1001 15:20:29.266876 4869 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-5f66f6967d-mnbqz" podUID="eb62e045-ca51-4b33-a63d-9c53b247cc91" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.141:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.141:8443: connect: connection refused" Oct 01 15:20:30 crc kubenswrapper[4869]: I1001 15:20:30.473667 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-6647d584b4-vsdfk" Oct 01 15:20:31 crc kubenswrapper[4869]: I1001 15:20:31.074569 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-slhz2" Oct 01 15:20:31 crc kubenswrapper[4869]: I1001 15:20:31.169589 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f9c5d763-90ad-4611-8cac-193343af1b78-scripts\") pod \"f9c5d763-90ad-4611-8cac-193343af1b78\" (UID: \"f9c5d763-90ad-4611-8cac-193343af1b78\") " Oct 01 15:20:31 crc kubenswrapper[4869]: I1001 15:20:31.169691 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9c5d763-90ad-4611-8cac-193343af1b78-combined-ca-bundle\") pod \"f9c5d763-90ad-4611-8cac-193343af1b78\" (UID: \"f9c5d763-90ad-4611-8cac-193343af1b78\") " Oct 01 15:20:31 crc kubenswrapper[4869]: I1001 15:20:31.169794 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/f9c5d763-90ad-4611-8cac-193343af1b78-db-sync-config-data\") pod \"f9c5d763-90ad-4611-8cac-193343af1b78\" (UID: \"f9c5d763-90ad-4611-8cac-193343af1b78\") " Oct 01 15:20:31 crc kubenswrapper[4869]: I1001 15:20:31.169865 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f9c5d763-90ad-4611-8cac-193343af1b78-config-data\") pod \"f9c5d763-90ad-4611-8cac-193343af1b78\" (UID: \"f9c5d763-90ad-4611-8cac-193343af1b78\") " Oct 01 15:20:31 crc kubenswrapper[4869]: I1001 15:20:31.169909 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2r52b\" (UniqueName: \"kubernetes.io/projected/f9c5d763-90ad-4611-8cac-193343af1b78-kube-api-access-2r52b\") pod \"f9c5d763-90ad-4611-8cac-193343af1b78\" (UID: \"f9c5d763-90ad-4611-8cac-193343af1b78\") " Oct 01 15:20:31 crc kubenswrapper[4869]: I1001 15:20:31.169928 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f9c5d763-90ad-4611-8cac-193343af1b78-etc-machine-id\") pod \"f9c5d763-90ad-4611-8cac-193343af1b78\" (UID: \"f9c5d763-90ad-4611-8cac-193343af1b78\") " Oct 01 15:20:31 crc kubenswrapper[4869]: I1001 15:20:31.170361 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f9c5d763-90ad-4611-8cac-193343af1b78-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "f9c5d763-90ad-4611-8cac-193343af1b78" (UID: "f9c5d763-90ad-4611-8cac-193343af1b78"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 15:20:31 crc kubenswrapper[4869]: I1001 15:20:31.179482 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f9c5d763-90ad-4611-8cac-193343af1b78-scripts" (OuterVolumeSpecName: "scripts") pod "f9c5d763-90ad-4611-8cac-193343af1b78" (UID: "f9c5d763-90ad-4611-8cac-193343af1b78"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:20:31 crc kubenswrapper[4869]: I1001 15:20:31.186304 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-slhz2" event={"ID":"f9c5d763-90ad-4611-8cac-193343af1b78","Type":"ContainerDied","Data":"87f0430d9a77a41624a7e692546232b10783a736f8a1277ca16159fc4795f254"} Oct 01 15:20:31 crc kubenswrapper[4869]: I1001 15:20:31.186342 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="87f0430d9a77a41624a7e692546232b10783a736f8a1277ca16159fc4795f254" Oct 01 15:20:31 crc kubenswrapper[4869]: I1001 15:20:31.186399 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-slhz2" Oct 01 15:20:31 crc kubenswrapper[4869]: I1001 15:20:31.190610 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f9c5d763-90ad-4611-8cac-193343af1b78-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "f9c5d763-90ad-4611-8cac-193343af1b78" (UID: "f9c5d763-90ad-4611-8cac-193343af1b78"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:20:31 crc kubenswrapper[4869]: I1001 15:20:31.190746 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f9c5d763-90ad-4611-8cac-193343af1b78-kube-api-access-2r52b" (OuterVolumeSpecName: "kube-api-access-2r52b") pod "f9c5d763-90ad-4611-8cac-193343af1b78" (UID: "f9c5d763-90ad-4611-8cac-193343af1b78"). InnerVolumeSpecName "kube-api-access-2r52b". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:20:31 crc kubenswrapper[4869]: I1001 15:20:31.194098 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631","Type":"ContainerStarted","Data":"d4ca17658569506170532a80a1c1f9c873d3f66d15512d2b00799f03d85ea88b"} Oct 01 15:20:31 crc kubenswrapper[4869]: I1001 15:20:31.194401 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631" containerName="ceilometer-central-agent" containerID="cri-o://e908a2914b310dca9c1c19937855f20362a2902fd309d74542f25cd91fef47d9" gracePeriod=30 Oct 01 15:20:31 crc kubenswrapper[4869]: I1001 15:20:31.194782 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 01 15:20:31 crc kubenswrapper[4869]: I1001 15:20:31.195201 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631" containerName="proxy-httpd" containerID="cri-o://d4ca17658569506170532a80a1c1f9c873d3f66d15512d2b00799f03d85ea88b" gracePeriod=30 Oct 01 15:20:31 crc kubenswrapper[4869]: I1001 15:20:31.195311 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631" containerName="sg-core" containerID="cri-o://99d0e76b22b29e4947bfb92dfa7c9861448ea38250b4b8538bda3b9c670ddc17" gracePeriod=30 Oct 01 15:20:31 crc kubenswrapper[4869]: I1001 15:20:31.195857 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631" containerName="ceilometer-notification-agent" containerID="cri-o://2a4d444a3d5e23bf38c8e2f1225cef437847d97521691a2eccf4ae028317772c" gracePeriod=30 Oct 01 15:20:31 crc 
kubenswrapper[4869]: I1001 15:20:31.213093 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f9c5d763-90ad-4611-8cac-193343af1b78-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f9c5d763-90ad-4611-8cac-193343af1b78" (UID: "f9c5d763-90ad-4611-8cac-193343af1b78"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:20:31 crc kubenswrapper[4869]: I1001 15:20:31.234759 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.48758649 podStartE2EDuration="55.234741528s" podCreationTimestamp="2025-10-01 15:19:36 +0000 UTC" firstStartedPulling="2025-10-01 15:19:38.203609028 +0000 UTC m=+887.350452134" lastFinishedPulling="2025-10-01 15:20:30.950764056 +0000 UTC m=+940.097607172" observedRunningTime="2025-10-01 15:20:31.227930116 +0000 UTC m=+940.374773252" watchObservedRunningTime="2025-10-01 15:20:31.234741528 +0000 UTC m=+940.381584644" Oct 01 15:20:31 crc kubenswrapper[4869]: I1001 15:20:31.264432 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f9c5d763-90ad-4611-8cac-193343af1b78-config-data" (OuterVolumeSpecName: "config-data") pod "f9c5d763-90ad-4611-8cac-193343af1b78" (UID: "f9c5d763-90ad-4611-8cac-193343af1b78"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:20:31 crc kubenswrapper[4869]: I1001 15:20:31.272458 4869 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/f9c5d763-90ad-4611-8cac-193343af1b78-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 15:20:31 crc kubenswrapper[4869]: I1001 15:20:31.272499 4869 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f9c5d763-90ad-4611-8cac-193343af1b78-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 15:20:31 crc kubenswrapper[4869]: I1001 15:20:31.272513 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2r52b\" (UniqueName: \"kubernetes.io/projected/f9c5d763-90ad-4611-8cac-193343af1b78-kube-api-access-2r52b\") on node \"crc\" DevicePath \"\"" Oct 01 15:20:31 crc kubenswrapper[4869]: I1001 15:20:31.272530 4869 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f9c5d763-90ad-4611-8cac-193343af1b78-etc-machine-id\") on node \"crc\" DevicePath \"\"" Oct 01 15:20:31 crc kubenswrapper[4869]: I1001 15:20:31.272542 4869 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f9c5d763-90ad-4611-8cac-193343af1b78-scripts\") on node \"crc\" DevicePath \"\"" Oct 01 15:20:31 crc kubenswrapper[4869]: I1001 15:20:31.272554 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9c5d763-90ad-4611-8cac-193343af1b78-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 15:20:31 crc kubenswrapper[4869]: I1001 15:20:31.465025 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Oct 01 15:20:31 crc kubenswrapper[4869]: E1001 15:20:31.465857 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9c5d763-90ad-4611-8cac-193343af1b78" containerName="cinder-db-sync" Oct 01 15:20:31 crc kubenswrapper[4869]: I1001 15:20:31.465880 4869 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="f9c5d763-90ad-4611-8cac-193343af1b78" containerName="cinder-db-sync" Oct 01 15:20:31 crc kubenswrapper[4869]: I1001 15:20:31.466119 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="f9c5d763-90ad-4611-8cac-193343af1b78" containerName="cinder-db-sync" Oct 01 15:20:31 crc kubenswrapper[4869]: I1001 15:20:31.467259 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 01 15:20:31 crc kubenswrapper[4869]: I1001 15:20:31.472916 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Oct 01 15:20:31 crc kubenswrapper[4869]: I1001 15:20:31.494190 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 01 15:20:31 crc kubenswrapper[4869]: I1001 15:20:31.574726 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-64dff96bf7-6fjfz"] Oct 01 15:20:31 crc kubenswrapper[4869]: I1001 15:20:31.574942 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-64dff96bf7-6fjfz" podUID="11b54042-d5f6-43e4-92c7-560bc7f55f33" containerName="dnsmasq-dns" containerID="cri-o://b08d81afcf91560c9ccf6b61bf61d95bdc867e3dea0228e68a281990f5c2ed83" gracePeriod=10 Oct 01 15:20:31 crc kubenswrapper[4869]: I1001 15:20:31.577530 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8cf4ad0d-8c54-45ba-b36b-269f68d9e46d-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"8cf4ad0d-8c54-45ba-b36b-269f68d9e46d\") " pod="openstack/cinder-scheduler-0" Oct 01 15:20:31 crc kubenswrapper[4869]: I1001 15:20:31.577605 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8cf4ad0d-8c54-45ba-b36b-269f68d9e46d-scripts\") pod \"cinder-scheduler-0\" (UID: \"8cf4ad0d-8c54-45ba-b36b-269f68d9e46d\") " pod="openstack/cinder-scheduler-0" Oct 01 15:20:31 crc kubenswrapper[4869]: I1001 15:20:31.577658 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rqv66\" (UniqueName: \"kubernetes.io/projected/8cf4ad0d-8c54-45ba-b36b-269f68d9e46d-kube-api-access-rqv66\") pod \"cinder-scheduler-0\" (UID: \"8cf4ad0d-8c54-45ba-b36b-269f68d9e46d\") " pod="openstack/cinder-scheduler-0" Oct 01 15:20:31 crc kubenswrapper[4869]: I1001 15:20:31.577694 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8cf4ad0d-8c54-45ba-b36b-269f68d9e46d-config-data\") pod \"cinder-scheduler-0\" (UID: \"8cf4ad0d-8c54-45ba-b36b-269f68d9e46d\") " pod="openstack/cinder-scheduler-0" Oct 01 15:20:31 crc kubenswrapper[4869]: I1001 15:20:31.577721 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8cf4ad0d-8c54-45ba-b36b-269f68d9e46d-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"8cf4ad0d-8c54-45ba-b36b-269f68d9e46d\") " pod="openstack/cinder-scheduler-0" Oct 01 15:20:31 crc kubenswrapper[4869]: I1001 15:20:31.577754 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8cf4ad0d-8c54-45ba-b36b-269f68d9e46d-config-data-custom\") pod \"cinder-scheduler-0\" (UID: 
\"8cf4ad0d-8c54-45ba-b36b-269f68d9e46d\") " pod="openstack/cinder-scheduler-0" Oct 01 15:20:31 crc kubenswrapper[4869]: I1001 15:20:31.577872 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-64dff96bf7-6fjfz" Oct 01 15:20:31 crc kubenswrapper[4869]: I1001 15:20:31.629771 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-858794c669-gjc6c"] Oct 01 15:20:31 crc kubenswrapper[4869]: I1001 15:20:31.631142 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-858794c669-gjc6c" Oct 01 15:20:31 crc kubenswrapper[4869]: I1001 15:20:31.643454 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-858794c669-gjc6c"] Oct 01 15:20:31 crc kubenswrapper[4869]: I1001 15:20:31.680018 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/31ce96c3-bdba-442d-b7f8-dee4694e0e35-config\") pod \"dnsmasq-dns-858794c669-gjc6c\" (UID: \"31ce96c3-bdba-442d-b7f8-dee4694e0e35\") " pod="openstack/dnsmasq-dns-858794c669-gjc6c" Oct 01 15:20:31 crc kubenswrapper[4869]: I1001 15:20:31.680066 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rqv66\" (UniqueName: \"kubernetes.io/projected/8cf4ad0d-8c54-45ba-b36b-269f68d9e46d-kube-api-access-rqv66\") pod \"cinder-scheduler-0\" (UID: \"8cf4ad0d-8c54-45ba-b36b-269f68d9e46d\") " pod="openstack/cinder-scheduler-0" Oct 01 15:20:31 crc kubenswrapper[4869]: I1001 15:20:31.680087 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8cf4ad0d-8c54-45ba-b36b-269f68d9e46d-config-data\") pod \"cinder-scheduler-0\" (UID: \"8cf4ad0d-8c54-45ba-b36b-269f68d9e46d\") " pod="openstack/cinder-scheduler-0" Oct 01 15:20:31 crc kubenswrapper[4869]: I1001 15:20:31.680107 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8cf4ad0d-8c54-45ba-b36b-269f68d9e46d-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"8cf4ad0d-8c54-45ba-b36b-269f68d9e46d\") " pod="openstack/cinder-scheduler-0" Oct 01 15:20:31 crc kubenswrapper[4869]: I1001 15:20:31.680143 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8cf4ad0d-8c54-45ba-b36b-269f68d9e46d-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"8cf4ad0d-8c54-45ba-b36b-269f68d9e46d\") " pod="openstack/cinder-scheduler-0" Oct 01 15:20:31 crc kubenswrapper[4869]: I1001 15:20:31.680217 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/31ce96c3-bdba-442d-b7f8-dee4694e0e35-ovsdbserver-nb\") pod \"dnsmasq-dns-858794c669-gjc6c\" (UID: \"31ce96c3-bdba-442d-b7f8-dee4694e0e35\") " pod="openstack/dnsmasq-dns-858794c669-gjc6c" Oct 01 15:20:31 crc kubenswrapper[4869]: I1001 15:20:31.680250 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/31ce96c3-bdba-442d-b7f8-dee4694e0e35-dns-svc\") pod \"dnsmasq-dns-858794c669-gjc6c\" (UID: \"31ce96c3-bdba-442d-b7f8-dee4694e0e35\") " pod="openstack/dnsmasq-dns-858794c669-gjc6c" Oct 01 15:20:31 crc kubenswrapper[4869]: I1001 15:20:31.680370 4869 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/31ce96c3-bdba-442d-b7f8-dee4694e0e35-ovsdbserver-sb\") pod \"dnsmasq-dns-858794c669-gjc6c\" (UID: \"31ce96c3-bdba-442d-b7f8-dee4694e0e35\") " pod="openstack/dnsmasq-dns-858794c669-gjc6c" Oct 01 15:20:31 crc kubenswrapper[4869]: I1001 15:20:31.680400 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8cf4ad0d-8c54-45ba-b36b-269f68d9e46d-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"8cf4ad0d-8c54-45ba-b36b-269f68d9e46d\") " pod="openstack/cinder-scheduler-0" Oct 01 15:20:31 crc kubenswrapper[4869]: I1001 15:20:31.680423 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dfxs9\" (UniqueName: \"kubernetes.io/projected/31ce96c3-bdba-442d-b7f8-dee4694e0e35-kube-api-access-dfxs9\") pod \"dnsmasq-dns-858794c669-gjc6c\" (UID: \"31ce96c3-bdba-442d-b7f8-dee4694e0e35\") " pod="openstack/dnsmasq-dns-858794c669-gjc6c" Oct 01 15:20:31 crc kubenswrapper[4869]: I1001 15:20:31.680515 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8cf4ad0d-8c54-45ba-b36b-269f68d9e46d-scripts\") pod \"cinder-scheduler-0\" (UID: \"8cf4ad0d-8c54-45ba-b36b-269f68d9e46d\") " pod="openstack/cinder-scheduler-0" Oct 01 15:20:31 crc kubenswrapper[4869]: I1001 15:20:31.683921 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8cf4ad0d-8c54-45ba-b36b-269f68d9e46d-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"8cf4ad0d-8c54-45ba-b36b-269f68d9e46d\") " pod="openstack/cinder-scheduler-0" Oct 01 15:20:31 crc kubenswrapper[4869]: I1001 15:20:31.699103 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8cf4ad0d-8c54-45ba-b36b-269f68d9e46d-config-data\") pod \"cinder-scheduler-0\" (UID: \"8cf4ad0d-8c54-45ba-b36b-269f68d9e46d\") " pod="openstack/cinder-scheduler-0" Oct 01 15:20:31 crc kubenswrapper[4869]: I1001 15:20:31.705790 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8cf4ad0d-8c54-45ba-b36b-269f68d9e46d-scripts\") pod \"cinder-scheduler-0\" (UID: \"8cf4ad0d-8c54-45ba-b36b-269f68d9e46d\") " pod="openstack/cinder-scheduler-0" Oct 01 15:20:31 crc kubenswrapper[4869]: I1001 15:20:31.705865 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Oct 01 15:20:31 crc kubenswrapper[4869]: I1001 15:20:31.707625 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Oct 01 15:20:31 crc kubenswrapper[4869]: I1001 15:20:31.708355 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8cf4ad0d-8c54-45ba-b36b-269f68d9e46d-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"8cf4ad0d-8c54-45ba-b36b-269f68d9e46d\") " pod="openstack/cinder-scheduler-0" Oct 01 15:20:31 crc kubenswrapper[4869]: I1001 15:20:31.709009 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8cf4ad0d-8c54-45ba-b36b-269f68d9e46d-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"8cf4ad0d-8c54-45ba-b36b-269f68d9e46d\") " pod="openstack/cinder-scheduler-0" Oct 01 15:20:31 crc kubenswrapper[4869]: I1001 15:20:31.710145 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Oct 01 15:20:31 crc kubenswrapper[4869]: I1001 15:20:31.746594 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rqv66\" (UniqueName: \"kubernetes.io/projected/8cf4ad0d-8c54-45ba-b36b-269f68d9e46d-kube-api-access-rqv66\") pod \"cinder-scheduler-0\" (UID: \"8cf4ad0d-8c54-45ba-b36b-269f68d9e46d\") " pod="openstack/cinder-scheduler-0" Oct 01 15:20:31 crc kubenswrapper[4869]: I1001 15:20:31.795774 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/923e59ee-478b-4a7f-b7ab-4a4cff74c301-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"923e59ee-478b-4a7f-b7ab-4a4cff74c301\") " pod="openstack/cinder-api-0" Oct 01 15:20:31 crc kubenswrapper[4869]: I1001 15:20:31.795876 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hf8cn\" (UniqueName: \"kubernetes.io/projected/923e59ee-478b-4a7f-b7ab-4a4cff74c301-kube-api-access-hf8cn\") pod \"cinder-api-0\" (UID: \"923e59ee-478b-4a7f-b7ab-4a4cff74c301\") " pod="openstack/cinder-api-0" Oct 01 15:20:31 crc kubenswrapper[4869]: I1001 15:20:31.795912 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/31ce96c3-bdba-442d-b7f8-dee4694e0e35-ovsdbserver-sb\") pod \"dnsmasq-dns-858794c669-gjc6c\" (UID: \"31ce96c3-bdba-442d-b7f8-dee4694e0e35\") " pod="openstack/dnsmasq-dns-858794c669-gjc6c" Oct 01 15:20:31 crc kubenswrapper[4869]: I1001 15:20:31.795944 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dfxs9\" (UniqueName: \"kubernetes.io/projected/31ce96c3-bdba-442d-b7f8-dee4694e0e35-kube-api-access-dfxs9\") pod \"dnsmasq-dns-858794c669-gjc6c\" (UID: \"31ce96c3-bdba-442d-b7f8-dee4694e0e35\") " pod="openstack/dnsmasq-dns-858794c669-gjc6c" Oct 01 15:20:31 crc kubenswrapper[4869]: I1001 15:20:31.795990 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/923e59ee-478b-4a7f-b7ab-4a4cff74c301-scripts\") pod \"cinder-api-0\" (UID: \"923e59ee-478b-4a7f-b7ab-4a4cff74c301\") " pod="openstack/cinder-api-0" Oct 01 15:20:31 crc kubenswrapper[4869]: I1001 15:20:31.796023 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/923e59ee-478b-4a7f-b7ab-4a4cff74c301-config-data\") pod \"cinder-api-0\" (UID: 
\"923e59ee-478b-4a7f-b7ab-4a4cff74c301\") " pod="openstack/cinder-api-0" Oct 01 15:20:31 crc kubenswrapper[4869]: I1001 15:20:31.796076 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/923e59ee-478b-4a7f-b7ab-4a4cff74c301-etc-machine-id\") pod \"cinder-api-0\" (UID: \"923e59ee-478b-4a7f-b7ab-4a4cff74c301\") " pod="openstack/cinder-api-0" Oct 01 15:20:31 crc kubenswrapper[4869]: I1001 15:20:31.796107 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/31ce96c3-bdba-442d-b7f8-dee4694e0e35-config\") pod \"dnsmasq-dns-858794c669-gjc6c\" (UID: \"31ce96c3-bdba-442d-b7f8-dee4694e0e35\") " pod="openstack/dnsmasq-dns-858794c669-gjc6c" Oct 01 15:20:31 crc kubenswrapper[4869]: I1001 15:20:31.796128 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/923e59ee-478b-4a7f-b7ab-4a4cff74c301-logs\") pod \"cinder-api-0\" (UID: \"923e59ee-478b-4a7f-b7ab-4a4cff74c301\") " pod="openstack/cinder-api-0" Oct 01 15:20:31 crc kubenswrapper[4869]: I1001 15:20:31.796182 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/923e59ee-478b-4a7f-b7ab-4a4cff74c301-config-data-custom\") pod \"cinder-api-0\" (UID: \"923e59ee-478b-4a7f-b7ab-4a4cff74c301\") " pod="openstack/cinder-api-0" Oct 01 15:20:31 crc kubenswrapper[4869]: I1001 15:20:31.796220 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/31ce96c3-bdba-442d-b7f8-dee4694e0e35-ovsdbserver-nb\") pod \"dnsmasq-dns-858794c669-gjc6c\" (UID: \"31ce96c3-bdba-442d-b7f8-dee4694e0e35\") " pod="openstack/dnsmasq-dns-858794c669-gjc6c" Oct 01 15:20:31 crc kubenswrapper[4869]: I1001 15:20:31.796271 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/31ce96c3-bdba-442d-b7f8-dee4694e0e35-dns-svc\") pod \"dnsmasq-dns-858794c669-gjc6c\" (UID: \"31ce96c3-bdba-442d-b7f8-dee4694e0e35\") " pod="openstack/dnsmasq-dns-858794c669-gjc6c" Oct 01 15:20:31 crc kubenswrapper[4869]: I1001 15:20:31.797343 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/31ce96c3-bdba-442d-b7f8-dee4694e0e35-dns-svc\") pod \"dnsmasq-dns-858794c669-gjc6c\" (UID: \"31ce96c3-bdba-442d-b7f8-dee4694e0e35\") " pod="openstack/dnsmasq-dns-858794c669-gjc6c" Oct 01 15:20:31 crc kubenswrapper[4869]: I1001 15:20:31.797395 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/31ce96c3-bdba-442d-b7f8-dee4694e0e35-ovsdbserver-sb\") pod \"dnsmasq-dns-858794c669-gjc6c\" (UID: \"31ce96c3-bdba-442d-b7f8-dee4694e0e35\") " pod="openstack/dnsmasq-dns-858794c669-gjc6c" Oct 01 15:20:31 crc kubenswrapper[4869]: I1001 15:20:31.798030 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/31ce96c3-bdba-442d-b7f8-dee4694e0e35-config\") pod \"dnsmasq-dns-858794c669-gjc6c\" (UID: \"31ce96c3-bdba-442d-b7f8-dee4694e0e35\") " pod="openstack/dnsmasq-dns-858794c669-gjc6c" Oct 01 15:20:31 crc kubenswrapper[4869]: I1001 15:20:31.802740 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 01 15:20:31 crc kubenswrapper[4869]: I1001 15:20:31.803205 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/31ce96c3-bdba-442d-b7f8-dee4694e0e35-ovsdbserver-nb\") pod \"dnsmasq-dns-858794c669-gjc6c\" (UID: \"31ce96c3-bdba-442d-b7f8-dee4694e0e35\") " pod="openstack/dnsmasq-dns-858794c669-gjc6c" Oct 01 15:20:31 crc kubenswrapper[4869]: I1001 15:20:31.821375 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Oct 01 15:20:31 crc kubenswrapper[4869]: I1001 15:20:31.840773 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dfxs9\" (UniqueName: \"kubernetes.io/projected/31ce96c3-bdba-442d-b7f8-dee4694e0e35-kube-api-access-dfxs9\") pod \"dnsmasq-dns-858794c669-gjc6c\" (UID: \"31ce96c3-bdba-442d-b7f8-dee4694e0e35\") " pod="openstack/dnsmasq-dns-858794c669-gjc6c" Oct 01 15:20:31 crc kubenswrapper[4869]: I1001 15:20:31.899313 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/923e59ee-478b-4a7f-b7ab-4a4cff74c301-config-data\") pod \"cinder-api-0\" (UID: \"923e59ee-478b-4a7f-b7ab-4a4cff74c301\") " pod="openstack/cinder-api-0" Oct 01 15:20:31 crc kubenswrapper[4869]: I1001 15:20:31.899400 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/923e59ee-478b-4a7f-b7ab-4a4cff74c301-etc-machine-id\") pod \"cinder-api-0\" (UID: \"923e59ee-478b-4a7f-b7ab-4a4cff74c301\") " pod="openstack/cinder-api-0" Oct 01 15:20:31 crc kubenswrapper[4869]: I1001 15:20:31.899439 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/923e59ee-478b-4a7f-b7ab-4a4cff74c301-logs\") pod \"cinder-api-0\" (UID: \"923e59ee-478b-4a7f-b7ab-4a4cff74c301\") " pod="openstack/cinder-api-0" Oct 01 15:20:31 crc kubenswrapper[4869]: I1001 15:20:31.899486 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/923e59ee-478b-4a7f-b7ab-4a4cff74c301-config-data-custom\") pod \"cinder-api-0\" (UID: \"923e59ee-478b-4a7f-b7ab-4a4cff74c301\") " pod="openstack/cinder-api-0" Oct 01 15:20:31 crc kubenswrapper[4869]: I1001 15:20:31.899575 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/923e59ee-478b-4a7f-b7ab-4a4cff74c301-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"923e59ee-478b-4a7f-b7ab-4a4cff74c301\") " pod="openstack/cinder-api-0" Oct 01 15:20:31 crc kubenswrapper[4869]: I1001 15:20:31.899625 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hf8cn\" (UniqueName: \"kubernetes.io/projected/923e59ee-478b-4a7f-b7ab-4a4cff74c301-kube-api-access-hf8cn\") pod \"cinder-api-0\" (UID: \"923e59ee-478b-4a7f-b7ab-4a4cff74c301\") " pod="openstack/cinder-api-0" Oct 01 15:20:31 crc kubenswrapper[4869]: I1001 15:20:31.899681 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/923e59ee-478b-4a7f-b7ab-4a4cff74c301-scripts\") pod \"cinder-api-0\" (UID: \"923e59ee-478b-4a7f-b7ab-4a4cff74c301\") " pod="openstack/cinder-api-0" Oct 01 15:20:31 crc kubenswrapper[4869]: I1001 15:20:31.900439 4869 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/923e59ee-478b-4a7f-b7ab-4a4cff74c301-logs\") pod \"cinder-api-0\" (UID: \"923e59ee-478b-4a7f-b7ab-4a4cff74c301\") " pod="openstack/cinder-api-0" Oct 01 15:20:31 crc kubenswrapper[4869]: I1001 15:20:31.900834 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/923e59ee-478b-4a7f-b7ab-4a4cff74c301-etc-machine-id\") pod \"cinder-api-0\" (UID: \"923e59ee-478b-4a7f-b7ab-4a4cff74c301\") " pod="openstack/cinder-api-0" Oct 01 15:20:31 crc kubenswrapper[4869]: I1001 15:20:31.908149 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/923e59ee-478b-4a7f-b7ab-4a4cff74c301-scripts\") pod \"cinder-api-0\" (UID: \"923e59ee-478b-4a7f-b7ab-4a4cff74c301\") " pod="openstack/cinder-api-0" Oct 01 15:20:31 crc kubenswrapper[4869]: I1001 15:20:31.908774 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/923e59ee-478b-4a7f-b7ab-4a4cff74c301-config-data-custom\") pod \"cinder-api-0\" (UID: \"923e59ee-478b-4a7f-b7ab-4a4cff74c301\") " pod="openstack/cinder-api-0" Oct 01 15:20:31 crc kubenswrapper[4869]: I1001 15:20:31.913459 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/923e59ee-478b-4a7f-b7ab-4a4cff74c301-config-data\") pod \"cinder-api-0\" (UID: \"923e59ee-478b-4a7f-b7ab-4a4cff74c301\") " pod="openstack/cinder-api-0" Oct 01 15:20:31 crc kubenswrapper[4869]: I1001 15:20:31.921392 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/923e59ee-478b-4a7f-b7ab-4a4cff74c301-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"923e59ee-478b-4a7f-b7ab-4a4cff74c301\") " pod="openstack/cinder-api-0" Oct 01 15:20:31 crc kubenswrapper[4869]: I1001 15:20:31.941786 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hf8cn\" (UniqueName: \"kubernetes.io/projected/923e59ee-478b-4a7f-b7ab-4a4cff74c301-kube-api-access-hf8cn\") pod \"cinder-api-0\" (UID: \"923e59ee-478b-4a7f-b7ab-4a4cff74c301\") " pod="openstack/cinder-api-0" Oct 01 15:20:31 crc kubenswrapper[4869]: I1001 15:20:31.952809 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-858794c669-gjc6c" Oct 01 15:20:32 crc kubenswrapper[4869]: I1001 15:20:32.081441 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Oct 01 15:20:32 crc kubenswrapper[4869]: I1001 15:20:32.334527 4869 generic.go:334] "Generic (PLEG): container finished" podID="cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631" containerID="99d0e76b22b29e4947bfb92dfa7c9861448ea38250b4b8538bda3b9c670ddc17" exitCode=2 Oct 01 15:20:32 crc kubenswrapper[4869]: I1001 15:20:32.334740 4869 generic.go:334] "Generic (PLEG): container finished" podID="cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631" containerID="e908a2914b310dca9c1c19937855f20362a2902fd309d74542f25cd91fef47d9" exitCode=0 Oct 01 15:20:32 crc kubenswrapper[4869]: I1001 15:20:32.334778 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631","Type":"ContainerDied","Data":"99d0e76b22b29e4947bfb92dfa7c9861448ea38250b4b8538bda3b9c670ddc17"} Oct 01 15:20:32 crc kubenswrapper[4869]: I1001 15:20:32.334801 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631","Type":"ContainerDied","Data":"e908a2914b310dca9c1c19937855f20362a2902fd309d74542f25cd91fef47d9"} Oct 01 15:20:32 crc kubenswrapper[4869]: I1001 15:20:32.353632 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-64dff96bf7-6fjfz" Oct 01 15:20:32 crc kubenswrapper[4869]: I1001 15:20:32.382783 4869 generic.go:334] "Generic (PLEG): container finished" podID="11b54042-d5f6-43e4-92c7-560bc7f55f33" containerID="b08d81afcf91560c9ccf6b61bf61d95bdc867e3dea0228e68a281990f5c2ed83" exitCode=0 Oct 01 15:20:32 crc kubenswrapper[4869]: I1001 15:20:32.382835 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-64dff96bf7-6fjfz" event={"ID":"11b54042-d5f6-43e4-92c7-560bc7f55f33","Type":"ContainerDied","Data":"b08d81afcf91560c9ccf6b61bf61d95bdc867e3dea0228e68a281990f5c2ed83"} Oct 01 15:20:32 crc kubenswrapper[4869]: I1001 15:20:32.382870 4869 scope.go:117] "RemoveContainer" containerID="b08d81afcf91560c9ccf6b61bf61d95bdc867e3dea0228e68a281990f5c2ed83" Oct 01 15:20:32 crc kubenswrapper[4869]: I1001 15:20:32.429011 4869 scope.go:117] "RemoveContainer" containerID="b052c9b9c466e296ca9425673c7545b5dc3b0ff495308382b40e23a030ff5492" Oct 01 15:20:32 crc kubenswrapper[4869]: I1001 15:20:32.441832 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/11b54042-d5f6-43e4-92c7-560bc7f55f33-dns-svc\") pod \"11b54042-d5f6-43e4-92c7-560bc7f55f33\" (UID: \"11b54042-d5f6-43e4-92c7-560bc7f55f33\") " Oct 01 15:20:32 crc kubenswrapper[4869]: I1001 15:20:32.441922 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/11b54042-d5f6-43e4-92c7-560bc7f55f33-ovsdbserver-sb\") pod \"11b54042-d5f6-43e4-92c7-560bc7f55f33\" (UID: \"11b54042-d5f6-43e4-92c7-560bc7f55f33\") " Oct 01 15:20:32 crc kubenswrapper[4869]: I1001 15:20:32.441991 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/11b54042-d5f6-43e4-92c7-560bc7f55f33-ovsdbserver-nb\") pod \"11b54042-d5f6-43e4-92c7-560bc7f55f33\" (UID: \"11b54042-d5f6-43e4-92c7-560bc7f55f33\") " Oct 01 15:20:32 crc kubenswrapper[4869]: I1001 15:20:32.442014 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dm8k6\" (UniqueName: 
\"kubernetes.io/projected/11b54042-d5f6-43e4-92c7-560bc7f55f33-kube-api-access-dm8k6\") pod \"11b54042-d5f6-43e4-92c7-560bc7f55f33\" (UID: \"11b54042-d5f6-43e4-92c7-560bc7f55f33\") " Oct 01 15:20:32 crc kubenswrapper[4869]: I1001 15:20:32.442060 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/11b54042-d5f6-43e4-92c7-560bc7f55f33-config\") pod \"11b54042-d5f6-43e4-92c7-560bc7f55f33\" (UID: \"11b54042-d5f6-43e4-92c7-560bc7f55f33\") " Oct 01 15:20:32 crc kubenswrapper[4869]: I1001 15:20:32.466798 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/11b54042-d5f6-43e4-92c7-560bc7f55f33-kube-api-access-dm8k6" (OuterVolumeSpecName: "kube-api-access-dm8k6") pod "11b54042-d5f6-43e4-92c7-560bc7f55f33" (UID: "11b54042-d5f6-43e4-92c7-560bc7f55f33"). InnerVolumeSpecName "kube-api-access-dm8k6". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:20:32 crc kubenswrapper[4869]: I1001 15:20:32.508863 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/11b54042-d5f6-43e4-92c7-560bc7f55f33-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "11b54042-d5f6-43e4-92c7-560bc7f55f33" (UID: "11b54042-d5f6-43e4-92c7-560bc7f55f33"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:20:32 crc kubenswrapper[4869]: I1001 15:20:32.527917 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/11b54042-d5f6-43e4-92c7-560bc7f55f33-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "11b54042-d5f6-43e4-92c7-560bc7f55f33" (UID: "11b54042-d5f6-43e4-92c7-560bc7f55f33"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:20:32 crc kubenswrapper[4869]: I1001 15:20:32.543995 4869 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/11b54042-d5f6-43e4-92c7-560bc7f55f33-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 01 15:20:32 crc kubenswrapper[4869]: I1001 15:20:32.544035 4869 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/11b54042-d5f6-43e4-92c7-560bc7f55f33-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 01 15:20:32 crc kubenswrapper[4869]: I1001 15:20:32.544051 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dm8k6\" (UniqueName: \"kubernetes.io/projected/11b54042-d5f6-43e4-92c7-560bc7f55f33-kube-api-access-dm8k6\") on node \"crc\" DevicePath \"\"" Oct 01 15:20:32 crc kubenswrapper[4869]: I1001 15:20:32.575698 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/11b54042-d5f6-43e4-92c7-560bc7f55f33-config" (OuterVolumeSpecName: "config") pod "11b54042-d5f6-43e4-92c7-560bc7f55f33" (UID: "11b54042-d5f6-43e4-92c7-560bc7f55f33"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:20:32 crc kubenswrapper[4869]: I1001 15:20:32.627308 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/11b54042-d5f6-43e4-92c7-560bc7f55f33-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "11b54042-d5f6-43e4-92c7-560bc7f55f33" (UID: "11b54042-d5f6-43e4-92c7-560bc7f55f33"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:20:32 crc kubenswrapper[4869]: I1001 15:20:32.655470 4869 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/11b54042-d5f6-43e4-92c7-560bc7f55f33-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 01 15:20:32 crc kubenswrapper[4869]: I1001 15:20:32.655495 4869 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/11b54042-d5f6-43e4-92c7-560bc7f55f33-config\") on node \"crc\" DevicePath \"\"" Oct 01 15:20:32 crc kubenswrapper[4869]: I1001 15:20:32.693960 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-858794c669-gjc6c"] Oct 01 15:20:32 crc kubenswrapper[4869]: I1001 15:20:32.866144 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 01 15:20:33 crc kubenswrapper[4869]: I1001 15:20:33.049939 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Oct 01 15:20:33 crc kubenswrapper[4869]: I1001 15:20:33.403050 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"8cf4ad0d-8c54-45ba-b36b-269f68d9e46d","Type":"ContainerStarted","Data":"bd01c0f1c35d9acc00d7b7dcbe47467a0bbda2bdbb35dcc1cf838092d9a62689"} Oct 01 15:20:33 crc kubenswrapper[4869]: I1001 15:20:33.407283 4869 generic.go:334] "Generic (PLEG): container finished" podID="31ce96c3-bdba-442d-b7f8-dee4694e0e35" containerID="fcb635a34ca72a5f99e79e298d63d2007579870aaa49899a1db6e8f062d5c00a" exitCode=0 Oct 01 15:20:33 crc kubenswrapper[4869]: I1001 15:20:33.407364 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-858794c669-gjc6c" event={"ID":"31ce96c3-bdba-442d-b7f8-dee4694e0e35","Type":"ContainerDied","Data":"fcb635a34ca72a5f99e79e298d63d2007579870aaa49899a1db6e8f062d5c00a"} Oct 01 15:20:33 crc kubenswrapper[4869]: I1001 15:20:33.407403 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-858794c669-gjc6c" event={"ID":"31ce96c3-bdba-442d-b7f8-dee4694e0e35","Type":"ContainerStarted","Data":"62eb0b0bd2dfebaa9832aac6456e3bbd8f2185e7f3a3d01e2f04cfa11765412b"} Oct 01 15:20:33 crc kubenswrapper[4869]: I1001 15:20:33.410127 4869 generic.go:334] "Generic (PLEG): container finished" podID="cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631" containerID="2a4d444a3d5e23bf38c8e2f1225cef437847d97521691a2eccf4ae028317772c" exitCode=0 Oct 01 15:20:33 crc kubenswrapper[4869]: I1001 15:20:33.410190 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631","Type":"ContainerDied","Data":"2a4d444a3d5e23bf38c8e2f1225cef437847d97521691a2eccf4ae028317772c"} Oct 01 15:20:33 crc kubenswrapper[4869]: I1001 15:20:33.417343 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-64dff96bf7-6fjfz" event={"ID":"11b54042-d5f6-43e4-92c7-560bc7f55f33","Type":"ContainerDied","Data":"9a264b0d244a16c8b3d5bd5813baeaac86de5874b9c8df55b50d38dec371b441"} Oct 01 15:20:33 crc kubenswrapper[4869]: I1001 15:20:33.417473 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-64dff96bf7-6fjfz" Oct 01 15:20:33 crc kubenswrapper[4869]: I1001 15:20:33.446081 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"923e59ee-478b-4a7f-b7ab-4a4cff74c301","Type":"ContainerStarted","Data":"56950badc0c2eaea1bac9cb191eef2e865e65c1dbfdfbdcb4e44467a91e6f456"} Oct 01 15:20:33 crc kubenswrapper[4869]: I1001 15:20:33.519659 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-64dff96bf7-6fjfz"] Oct 01 15:20:33 crc kubenswrapper[4869]: I1001 15:20:33.530490 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-64dff96bf7-6fjfz"] Oct 01 15:20:33 crc kubenswrapper[4869]: I1001 15:20:33.605951 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="11b54042-d5f6-43e4-92c7-560bc7f55f33" path="/var/lib/kubelet/pods/11b54042-d5f6-43e4-92c7-560bc7f55f33/volumes" Oct 01 15:20:34 crc kubenswrapper[4869]: I1001 15:20:34.188075 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Oct 01 15:20:34 crc kubenswrapper[4869]: I1001 15:20:34.463305 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-858794c669-gjc6c" event={"ID":"31ce96c3-bdba-442d-b7f8-dee4694e0e35","Type":"ContainerStarted","Data":"5cb41dae4bf91d71c27d7756ccbfc867913ef107135985c1bbac39787d97af52"} Oct 01 15:20:34 crc kubenswrapper[4869]: I1001 15:20:34.463561 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-858794c669-gjc6c" Oct 01 15:20:34 crc kubenswrapper[4869]: I1001 15:20:34.467807 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"923e59ee-478b-4a7f-b7ab-4a4cff74c301","Type":"ContainerStarted","Data":"cc3df20a16cf7d5240a794aac982b0ec54ef14b214d50f3c8cf34a8134dd8ddd"} Oct 01 15:20:34 crc kubenswrapper[4869]: I1001 15:20:34.493805 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-858794c669-gjc6c" podStartSLOduration=3.493772951 podStartE2EDuration="3.493772951s" podCreationTimestamp="2025-10-01 15:20:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:20:34.490433797 +0000 UTC m=+943.637276933" watchObservedRunningTime="2025-10-01 15:20:34.493772951 +0000 UTC m=+943.640616067" Oct 01 15:20:35 crc kubenswrapper[4869]: I1001 15:20:35.477971 4869 generic.go:334] "Generic (PLEG): container finished" podID="560e7cea-ec57-4f2c-b0d3-9ee5ded6f37a" containerID="346ca77027203d6eadf53d5f1a1d64a58f3c7f0e500aa8c8d4d64c71e76ee52b" exitCode=0 Oct 01 15:20:35 crc kubenswrapper[4869]: I1001 15:20:35.478069 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-9fzlp" event={"ID":"560e7cea-ec57-4f2c-b0d3-9ee5ded6f37a","Type":"ContainerDied","Data":"346ca77027203d6eadf53d5f1a1d64a58f3c7f0e500aa8c8d4d64c71e76ee52b"} Oct 01 15:20:35 crc kubenswrapper[4869]: I1001 15:20:35.480589 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"923e59ee-478b-4a7f-b7ab-4a4cff74c301","Type":"ContainerStarted","Data":"111b1ff31b0ffb378cb2b8cdbaef649b1c24487798f3ed4fc4e808c0a6b06872"} Oct 01 15:20:35 crc kubenswrapper[4869]: I1001 15:20:35.480705 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="923e59ee-478b-4a7f-b7ab-4a4cff74c301" 
containerName="cinder-api-log" containerID="cri-o://cc3df20a16cf7d5240a794aac982b0ec54ef14b214d50f3c8cf34a8134dd8ddd" gracePeriod=30 Oct 01 15:20:35 crc kubenswrapper[4869]: I1001 15:20:35.480762 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Oct 01 15:20:35 crc kubenswrapper[4869]: I1001 15:20:35.480899 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="923e59ee-478b-4a7f-b7ab-4a4cff74c301" containerName="cinder-api" containerID="cri-o://111b1ff31b0ffb378cb2b8cdbaef649b1c24487798f3ed4fc4e808c0a6b06872" gracePeriod=30 Oct 01 15:20:35 crc kubenswrapper[4869]: I1001 15:20:35.482566 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"8cf4ad0d-8c54-45ba-b36b-269f68d9e46d","Type":"ContainerStarted","Data":"206a05984fb8bc0e0aeed737c6e7f44d011d40611058a9b85c9c6d37a902460a"} Oct 01 15:20:35 crc kubenswrapper[4869]: I1001 15:20:35.482605 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"8cf4ad0d-8c54-45ba-b36b-269f68d9e46d","Type":"ContainerStarted","Data":"7b737a2cb3c8cb1f340e265500d60564ed0247c504d3a8a75c2aac3f5eac597f"} Oct 01 15:20:35 crc kubenswrapper[4869]: I1001 15:20:35.534914 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=3.681954684 podStartE2EDuration="4.534889287s" podCreationTimestamp="2025-10-01 15:20:31 +0000 UTC" firstStartedPulling="2025-10-01 15:20:32.920538367 +0000 UTC m=+942.067381483" lastFinishedPulling="2025-10-01 15:20:33.77347297 +0000 UTC m=+942.920316086" observedRunningTime="2025-10-01 15:20:35.52469888 +0000 UTC m=+944.671541996" watchObservedRunningTime="2025-10-01 15:20:35.534889287 +0000 UTC m=+944.681732403" Oct 01 15:20:35 crc kubenswrapper[4869]: I1001 15:20:35.550494 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=4.5504688 podStartE2EDuration="4.5504688s" podCreationTimestamp="2025-10-01 15:20:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:20:35.540897439 +0000 UTC m=+944.687740565" watchObservedRunningTime="2025-10-01 15:20:35.5504688 +0000 UTC m=+944.697311916" Oct 01 15:20:35 crc kubenswrapper[4869]: I1001 15:20:35.798988 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-57c668579d-grs6h" Oct 01 15:20:35 crc kubenswrapper[4869]: I1001 15:20:35.969738 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-57c668579d-grs6h" Oct 01 15:20:36 crc kubenswrapper[4869]: I1001 15:20:36.040784 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-6647d584b4-vsdfk"] Oct 01 15:20:36 crc kubenswrapper[4869]: I1001 15:20:36.041004 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-6647d584b4-vsdfk" podUID="a59f97ca-2d70-422f-ae93-81ec595b43aa" containerName="barbican-api-log" containerID="cri-o://fd763179891c7ac07764b590c98ced89ccaf481990b12d5588e073446dcf0a6b" gracePeriod=30 Oct 01 15:20:36 crc kubenswrapper[4869]: I1001 15:20:36.041420 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-6647d584b4-vsdfk" podUID="a59f97ca-2d70-422f-ae93-81ec595b43aa" 
containerName="barbican-api" containerID="cri-o://443ba4fc8389996332bbb96dcf8220fe8bbf57edce9d0d1214f191d9aa34cf6e" gracePeriod=30 Oct 01 15:20:36 crc kubenswrapper[4869]: I1001 15:20:36.147081 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Oct 01 15:20:36 crc kubenswrapper[4869]: I1001 15:20:36.244604 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/923e59ee-478b-4a7f-b7ab-4a4cff74c301-config-data\") pod \"923e59ee-478b-4a7f-b7ab-4a4cff74c301\" (UID: \"923e59ee-478b-4a7f-b7ab-4a4cff74c301\") " Oct 01 15:20:36 crc kubenswrapper[4869]: I1001 15:20:36.244851 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/923e59ee-478b-4a7f-b7ab-4a4cff74c301-combined-ca-bundle\") pod \"923e59ee-478b-4a7f-b7ab-4a4cff74c301\" (UID: \"923e59ee-478b-4a7f-b7ab-4a4cff74c301\") " Oct 01 15:20:36 crc kubenswrapper[4869]: I1001 15:20:36.244871 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/923e59ee-478b-4a7f-b7ab-4a4cff74c301-logs\") pod \"923e59ee-478b-4a7f-b7ab-4a4cff74c301\" (UID: \"923e59ee-478b-4a7f-b7ab-4a4cff74c301\") " Oct 01 15:20:36 crc kubenswrapper[4869]: I1001 15:20:36.244892 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/923e59ee-478b-4a7f-b7ab-4a4cff74c301-scripts\") pod \"923e59ee-478b-4a7f-b7ab-4a4cff74c301\" (UID: \"923e59ee-478b-4a7f-b7ab-4a4cff74c301\") " Oct 01 15:20:36 crc kubenswrapper[4869]: I1001 15:20:36.244943 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/923e59ee-478b-4a7f-b7ab-4a4cff74c301-config-data-custom\") pod \"923e59ee-478b-4a7f-b7ab-4a4cff74c301\" (UID: \"923e59ee-478b-4a7f-b7ab-4a4cff74c301\") " Oct 01 15:20:36 crc kubenswrapper[4869]: I1001 15:20:36.244972 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/923e59ee-478b-4a7f-b7ab-4a4cff74c301-etc-machine-id\") pod \"923e59ee-478b-4a7f-b7ab-4a4cff74c301\" (UID: \"923e59ee-478b-4a7f-b7ab-4a4cff74c301\") " Oct 01 15:20:36 crc kubenswrapper[4869]: I1001 15:20:36.245104 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hf8cn\" (UniqueName: \"kubernetes.io/projected/923e59ee-478b-4a7f-b7ab-4a4cff74c301-kube-api-access-hf8cn\") pod \"923e59ee-478b-4a7f-b7ab-4a4cff74c301\" (UID: \"923e59ee-478b-4a7f-b7ab-4a4cff74c301\") " Oct 01 15:20:36 crc kubenswrapper[4869]: I1001 15:20:36.245261 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/923e59ee-478b-4a7f-b7ab-4a4cff74c301-logs" (OuterVolumeSpecName: "logs") pod "923e59ee-478b-4a7f-b7ab-4a4cff74c301" (UID: "923e59ee-478b-4a7f-b7ab-4a4cff74c301"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 15:20:36 crc kubenswrapper[4869]: I1001 15:20:36.245420 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/923e59ee-478b-4a7f-b7ab-4a4cff74c301-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "923e59ee-478b-4a7f-b7ab-4a4cff74c301" (UID: "923e59ee-478b-4a7f-b7ab-4a4cff74c301"). 
InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 15:20:36 crc kubenswrapper[4869]: I1001 15:20:36.245507 4869 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/923e59ee-478b-4a7f-b7ab-4a4cff74c301-etc-machine-id\") on node \"crc\" DevicePath \"\"" Oct 01 15:20:36 crc kubenswrapper[4869]: I1001 15:20:36.245523 4869 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/923e59ee-478b-4a7f-b7ab-4a4cff74c301-logs\") on node \"crc\" DevicePath \"\"" Oct 01 15:20:36 crc kubenswrapper[4869]: I1001 15:20:36.252352 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/923e59ee-478b-4a7f-b7ab-4a4cff74c301-kube-api-access-hf8cn" (OuterVolumeSpecName: "kube-api-access-hf8cn") pod "923e59ee-478b-4a7f-b7ab-4a4cff74c301" (UID: "923e59ee-478b-4a7f-b7ab-4a4cff74c301"). InnerVolumeSpecName "kube-api-access-hf8cn". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:20:36 crc kubenswrapper[4869]: I1001 15:20:36.253410 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/923e59ee-478b-4a7f-b7ab-4a4cff74c301-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "923e59ee-478b-4a7f-b7ab-4a4cff74c301" (UID: "923e59ee-478b-4a7f-b7ab-4a4cff74c301"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:20:36 crc kubenswrapper[4869]: I1001 15:20:36.254633 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/923e59ee-478b-4a7f-b7ab-4a4cff74c301-scripts" (OuterVolumeSpecName: "scripts") pod "923e59ee-478b-4a7f-b7ab-4a4cff74c301" (UID: "923e59ee-478b-4a7f-b7ab-4a4cff74c301"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:20:36 crc kubenswrapper[4869]: I1001 15:20:36.281786 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/923e59ee-478b-4a7f-b7ab-4a4cff74c301-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "923e59ee-478b-4a7f-b7ab-4a4cff74c301" (UID: "923e59ee-478b-4a7f-b7ab-4a4cff74c301"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:20:36 crc kubenswrapper[4869]: I1001 15:20:36.290628 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/923e59ee-478b-4a7f-b7ab-4a4cff74c301-config-data" (OuterVolumeSpecName: "config-data") pod "923e59ee-478b-4a7f-b7ab-4a4cff74c301" (UID: "923e59ee-478b-4a7f-b7ab-4a4cff74c301"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:20:36 crc kubenswrapper[4869]: I1001 15:20:36.347421 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hf8cn\" (UniqueName: \"kubernetes.io/projected/923e59ee-478b-4a7f-b7ab-4a4cff74c301-kube-api-access-hf8cn\") on node \"crc\" DevicePath \"\"" Oct 01 15:20:36 crc kubenswrapper[4869]: I1001 15:20:36.347481 4869 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/923e59ee-478b-4a7f-b7ab-4a4cff74c301-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 15:20:36 crc kubenswrapper[4869]: I1001 15:20:36.347515 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/923e59ee-478b-4a7f-b7ab-4a4cff74c301-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 15:20:36 crc kubenswrapper[4869]: I1001 15:20:36.347529 4869 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/923e59ee-478b-4a7f-b7ab-4a4cff74c301-scripts\") on node \"crc\" DevicePath \"\"" Oct 01 15:20:36 crc kubenswrapper[4869]: I1001 15:20:36.347542 4869 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/923e59ee-478b-4a7f-b7ab-4a4cff74c301-config-data-custom\") on node \"crc\" DevicePath \"\"" Oct 01 15:20:36 crc kubenswrapper[4869]: I1001 15:20:36.496427 4869 generic.go:334] "Generic (PLEG): container finished" podID="a59f97ca-2d70-422f-ae93-81ec595b43aa" containerID="fd763179891c7ac07764b590c98ced89ccaf481990b12d5588e073446dcf0a6b" exitCode=143 Oct 01 15:20:36 crc kubenswrapper[4869]: I1001 15:20:36.496506 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6647d584b4-vsdfk" event={"ID":"a59f97ca-2d70-422f-ae93-81ec595b43aa","Type":"ContainerDied","Data":"fd763179891c7ac07764b590c98ced89ccaf481990b12d5588e073446dcf0a6b"} Oct 01 15:20:36 crc kubenswrapper[4869]: I1001 15:20:36.501450 4869 generic.go:334] "Generic (PLEG): container finished" podID="923e59ee-478b-4a7f-b7ab-4a4cff74c301" containerID="111b1ff31b0ffb378cb2b8cdbaef649b1c24487798f3ed4fc4e808c0a6b06872" exitCode=0 Oct 01 15:20:36 crc kubenswrapper[4869]: I1001 15:20:36.501477 4869 generic.go:334] "Generic (PLEG): container finished" podID="923e59ee-478b-4a7f-b7ab-4a4cff74c301" containerID="cc3df20a16cf7d5240a794aac982b0ec54ef14b214d50f3c8cf34a8134dd8ddd" exitCode=143 Oct 01 15:20:36 crc kubenswrapper[4869]: I1001 15:20:36.502998 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Oct 01 15:20:36 crc kubenswrapper[4869]: I1001 15:20:36.505584 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"923e59ee-478b-4a7f-b7ab-4a4cff74c301","Type":"ContainerDied","Data":"111b1ff31b0ffb378cb2b8cdbaef649b1c24487798f3ed4fc4e808c0a6b06872"} Oct 01 15:20:36 crc kubenswrapper[4869]: I1001 15:20:36.505661 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"923e59ee-478b-4a7f-b7ab-4a4cff74c301","Type":"ContainerDied","Data":"cc3df20a16cf7d5240a794aac982b0ec54ef14b214d50f3c8cf34a8134dd8ddd"} Oct 01 15:20:36 crc kubenswrapper[4869]: I1001 15:20:36.505677 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"923e59ee-478b-4a7f-b7ab-4a4cff74c301","Type":"ContainerDied","Data":"56950badc0c2eaea1bac9cb191eef2e865e65c1dbfdfbdcb4e44467a91e6f456"} Oct 01 15:20:36 crc kubenswrapper[4869]: I1001 15:20:36.505700 4869 scope.go:117] "RemoveContainer" containerID="111b1ff31b0ffb378cb2b8cdbaef649b1c24487798f3ed4fc4e808c0a6b06872" Oct 01 15:20:36 crc kubenswrapper[4869]: I1001 15:20:36.549995 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Oct 01 15:20:36 crc kubenswrapper[4869]: I1001 15:20:36.564683 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Oct 01 15:20:36 crc kubenswrapper[4869]: I1001 15:20:36.568946 4869 scope.go:117] "RemoveContainer" containerID="cc3df20a16cf7d5240a794aac982b0ec54ef14b214d50f3c8cf34a8134dd8ddd" Oct 01 15:20:36 crc kubenswrapper[4869]: I1001 15:20:36.587404 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Oct 01 15:20:36 crc kubenswrapper[4869]: E1001 15:20:36.587774 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="11b54042-d5f6-43e4-92c7-560bc7f55f33" containerName="dnsmasq-dns" Oct 01 15:20:36 crc kubenswrapper[4869]: I1001 15:20:36.587791 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="11b54042-d5f6-43e4-92c7-560bc7f55f33" containerName="dnsmasq-dns" Oct 01 15:20:36 crc kubenswrapper[4869]: E1001 15:20:36.587798 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="923e59ee-478b-4a7f-b7ab-4a4cff74c301" containerName="cinder-api" Oct 01 15:20:36 crc kubenswrapper[4869]: I1001 15:20:36.587804 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="923e59ee-478b-4a7f-b7ab-4a4cff74c301" containerName="cinder-api" Oct 01 15:20:36 crc kubenswrapper[4869]: E1001 15:20:36.587840 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="923e59ee-478b-4a7f-b7ab-4a4cff74c301" containerName="cinder-api-log" Oct 01 15:20:36 crc kubenswrapper[4869]: I1001 15:20:36.587846 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="923e59ee-478b-4a7f-b7ab-4a4cff74c301" containerName="cinder-api-log" Oct 01 15:20:36 crc kubenswrapper[4869]: E1001 15:20:36.587856 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="11b54042-d5f6-43e4-92c7-560bc7f55f33" containerName="init" Oct 01 15:20:36 crc kubenswrapper[4869]: I1001 15:20:36.587863 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="11b54042-d5f6-43e4-92c7-560bc7f55f33" containerName="init" Oct 01 15:20:36 crc kubenswrapper[4869]: I1001 15:20:36.588079 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="11b54042-d5f6-43e4-92c7-560bc7f55f33" containerName="dnsmasq-dns" Oct 01 15:20:36 crc kubenswrapper[4869]: I1001 15:20:36.588095 4869 
memory_manager.go:354] "RemoveStaleState removing state" podUID="923e59ee-478b-4a7f-b7ab-4a4cff74c301" containerName="cinder-api" Oct 01 15:20:36 crc kubenswrapper[4869]: I1001 15:20:36.588121 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="923e59ee-478b-4a7f-b7ab-4a4cff74c301" containerName="cinder-api-log" Oct 01 15:20:36 crc kubenswrapper[4869]: I1001 15:20:36.590860 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Oct 01 15:20:36 crc kubenswrapper[4869]: I1001 15:20:36.600538 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Oct 01 15:20:36 crc kubenswrapper[4869]: I1001 15:20:36.600585 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-internal-svc" Oct 01 15:20:36 crc kubenswrapper[4869]: I1001 15:20:36.600730 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-public-svc" Oct 01 15:20:36 crc kubenswrapper[4869]: I1001 15:20:36.618327 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Oct 01 15:20:36 crc kubenswrapper[4869]: I1001 15:20:36.629558 4869 scope.go:117] "RemoveContainer" containerID="111b1ff31b0ffb378cb2b8cdbaef649b1c24487798f3ed4fc4e808c0a6b06872" Oct 01 15:20:36 crc kubenswrapper[4869]: E1001 15:20:36.630713 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"111b1ff31b0ffb378cb2b8cdbaef649b1c24487798f3ed4fc4e808c0a6b06872\": container with ID starting with 111b1ff31b0ffb378cb2b8cdbaef649b1c24487798f3ed4fc4e808c0a6b06872 not found: ID does not exist" containerID="111b1ff31b0ffb378cb2b8cdbaef649b1c24487798f3ed4fc4e808c0a6b06872" Oct 01 15:20:36 crc kubenswrapper[4869]: I1001 15:20:36.630767 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"111b1ff31b0ffb378cb2b8cdbaef649b1c24487798f3ed4fc4e808c0a6b06872"} err="failed to get container status \"111b1ff31b0ffb378cb2b8cdbaef649b1c24487798f3ed4fc4e808c0a6b06872\": rpc error: code = NotFound desc = could not find container \"111b1ff31b0ffb378cb2b8cdbaef649b1c24487798f3ed4fc4e808c0a6b06872\": container with ID starting with 111b1ff31b0ffb378cb2b8cdbaef649b1c24487798f3ed4fc4e808c0a6b06872 not found: ID does not exist" Oct 01 15:20:36 crc kubenswrapper[4869]: I1001 15:20:36.630799 4869 scope.go:117] "RemoveContainer" containerID="cc3df20a16cf7d5240a794aac982b0ec54ef14b214d50f3c8cf34a8134dd8ddd" Oct 01 15:20:36 crc kubenswrapper[4869]: E1001 15:20:36.632675 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cc3df20a16cf7d5240a794aac982b0ec54ef14b214d50f3c8cf34a8134dd8ddd\": container with ID starting with cc3df20a16cf7d5240a794aac982b0ec54ef14b214d50f3c8cf34a8134dd8ddd not found: ID does not exist" containerID="cc3df20a16cf7d5240a794aac982b0ec54ef14b214d50f3c8cf34a8134dd8ddd" Oct 01 15:20:36 crc kubenswrapper[4869]: I1001 15:20:36.632711 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cc3df20a16cf7d5240a794aac982b0ec54ef14b214d50f3c8cf34a8134dd8ddd"} err="failed to get container status \"cc3df20a16cf7d5240a794aac982b0ec54ef14b214d50f3c8cf34a8134dd8ddd\": rpc error: code = NotFound desc = could not find container \"cc3df20a16cf7d5240a794aac982b0ec54ef14b214d50f3c8cf34a8134dd8ddd\": container with ID starting with 
cc3df20a16cf7d5240a794aac982b0ec54ef14b214d50f3c8cf34a8134dd8ddd not found: ID does not exist" Oct 01 15:20:36 crc kubenswrapper[4869]: I1001 15:20:36.632728 4869 scope.go:117] "RemoveContainer" containerID="111b1ff31b0ffb378cb2b8cdbaef649b1c24487798f3ed4fc4e808c0a6b06872" Oct 01 15:20:36 crc kubenswrapper[4869]: I1001 15:20:36.633368 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"111b1ff31b0ffb378cb2b8cdbaef649b1c24487798f3ed4fc4e808c0a6b06872"} err="failed to get container status \"111b1ff31b0ffb378cb2b8cdbaef649b1c24487798f3ed4fc4e808c0a6b06872\": rpc error: code = NotFound desc = could not find container \"111b1ff31b0ffb378cb2b8cdbaef649b1c24487798f3ed4fc4e808c0a6b06872\": container with ID starting with 111b1ff31b0ffb378cb2b8cdbaef649b1c24487798f3ed4fc4e808c0a6b06872 not found: ID does not exist" Oct 01 15:20:36 crc kubenswrapper[4869]: I1001 15:20:36.633404 4869 scope.go:117] "RemoveContainer" containerID="cc3df20a16cf7d5240a794aac982b0ec54ef14b214d50f3c8cf34a8134dd8ddd" Oct 01 15:20:36 crc kubenswrapper[4869]: I1001 15:20:36.633996 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cc3df20a16cf7d5240a794aac982b0ec54ef14b214d50f3c8cf34a8134dd8ddd"} err="failed to get container status \"cc3df20a16cf7d5240a794aac982b0ec54ef14b214d50f3c8cf34a8134dd8ddd\": rpc error: code = NotFound desc = could not find container \"cc3df20a16cf7d5240a794aac982b0ec54ef14b214d50f3c8cf34a8134dd8ddd\": container with ID starting with cc3df20a16cf7d5240a794aac982b0ec54ef14b214d50f3c8cf34a8134dd8ddd not found: ID does not exist" Oct 01 15:20:36 crc kubenswrapper[4869]: I1001 15:20:36.653750 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0af42303-cf8f-4774-82a6-cdc0818f976c-scripts\") pod \"cinder-api-0\" (UID: \"0af42303-cf8f-4774-82a6-cdc0818f976c\") " pod="openstack/cinder-api-0" Oct 01 15:20:36 crc kubenswrapper[4869]: I1001 15:20:36.653798 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0af42303-cf8f-4774-82a6-cdc0818f976c-config-data-custom\") pod \"cinder-api-0\" (UID: \"0af42303-cf8f-4774-82a6-cdc0818f976c\") " pod="openstack/cinder-api-0" Oct 01 15:20:36 crc kubenswrapper[4869]: I1001 15:20:36.653902 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0af42303-cf8f-4774-82a6-cdc0818f976c-public-tls-certs\") pod \"cinder-api-0\" (UID: \"0af42303-cf8f-4774-82a6-cdc0818f976c\") " pod="openstack/cinder-api-0" Oct 01 15:20:36 crc kubenswrapper[4869]: I1001 15:20:36.653988 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0af42303-cf8f-4774-82a6-cdc0818f976c-etc-machine-id\") pod \"cinder-api-0\" (UID: \"0af42303-cf8f-4774-82a6-cdc0818f976c\") " pod="openstack/cinder-api-0" Oct 01 15:20:36 crc kubenswrapper[4869]: I1001 15:20:36.654468 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hsr99\" (UniqueName: \"kubernetes.io/projected/0af42303-cf8f-4774-82a6-cdc0818f976c-kube-api-access-hsr99\") pod \"cinder-api-0\" (UID: \"0af42303-cf8f-4774-82a6-cdc0818f976c\") " pod="openstack/cinder-api-0" Oct 01 15:20:36 crc 
kubenswrapper[4869]: I1001 15:20:36.654537 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0af42303-cf8f-4774-82a6-cdc0818f976c-logs\") pod \"cinder-api-0\" (UID: \"0af42303-cf8f-4774-82a6-cdc0818f976c\") " pod="openstack/cinder-api-0" Oct 01 15:20:36 crc kubenswrapper[4869]: I1001 15:20:36.654578 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0af42303-cf8f-4774-82a6-cdc0818f976c-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"0af42303-cf8f-4774-82a6-cdc0818f976c\") " pod="openstack/cinder-api-0" Oct 01 15:20:36 crc kubenswrapper[4869]: I1001 15:20:36.654629 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0af42303-cf8f-4774-82a6-cdc0818f976c-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"0af42303-cf8f-4774-82a6-cdc0818f976c\") " pod="openstack/cinder-api-0" Oct 01 15:20:36 crc kubenswrapper[4869]: I1001 15:20:36.654705 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0af42303-cf8f-4774-82a6-cdc0818f976c-config-data\") pod \"cinder-api-0\" (UID: \"0af42303-cf8f-4774-82a6-cdc0818f976c\") " pod="openstack/cinder-api-0" Oct 01 15:20:36 crc kubenswrapper[4869]: I1001 15:20:36.757131 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0af42303-cf8f-4774-82a6-cdc0818f976c-config-data\") pod \"cinder-api-0\" (UID: \"0af42303-cf8f-4774-82a6-cdc0818f976c\") " pod="openstack/cinder-api-0" Oct 01 15:20:36 crc kubenswrapper[4869]: I1001 15:20:36.757519 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0af42303-cf8f-4774-82a6-cdc0818f976c-scripts\") pod \"cinder-api-0\" (UID: \"0af42303-cf8f-4774-82a6-cdc0818f976c\") " pod="openstack/cinder-api-0" Oct 01 15:20:36 crc kubenswrapper[4869]: I1001 15:20:36.758788 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0af42303-cf8f-4774-82a6-cdc0818f976c-config-data-custom\") pod \"cinder-api-0\" (UID: \"0af42303-cf8f-4774-82a6-cdc0818f976c\") " pod="openstack/cinder-api-0" Oct 01 15:20:36 crc kubenswrapper[4869]: I1001 15:20:36.758861 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0af42303-cf8f-4774-82a6-cdc0818f976c-public-tls-certs\") pod \"cinder-api-0\" (UID: \"0af42303-cf8f-4774-82a6-cdc0818f976c\") " pod="openstack/cinder-api-0" Oct 01 15:20:36 crc kubenswrapper[4869]: I1001 15:20:36.758904 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0af42303-cf8f-4774-82a6-cdc0818f976c-etc-machine-id\") pod \"cinder-api-0\" (UID: \"0af42303-cf8f-4774-82a6-cdc0818f976c\") " pod="openstack/cinder-api-0" Oct 01 15:20:36 crc kubenswrapper[4869]: I1001 15:20:36.758968 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hsr99\" (UniqueName: \"kubernetes.io/projected/0af42303-cf8f-4774-82a6-cdc0818f976c-kube-api-access-hsr99\") pod \"cinder-api-0\" (UID: 
\"0af42303-cf8f-4774-82a6-cdc0818f976c\") " pod="openstack/cinder-api-0" Oct 01 15:20:36 crc kubenswrapper[4869]: I1001 15:20:36.759036 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0af42303-cf8f-4774-82a6-cdc0818f976c-logs\") pod \"cinder-api-0\" (UID: \"0af42303-cf8f-4774-82a6-cdc0818f976c\") " pod="openstack/cinder-api-0" Oct 01 15:20:36 crc kubenswrapper[4869]: I1001 15:20:36.759059 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0af42303-cf8f-4774-82a6-cdc0818f976c-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"0af42303-cf8f-4774-82a6-cdc0818f976c\") " pod="openstack/cinder-api-0" Oct 01 15:20:36 crc kubenswrapper[4869]: I1001 15:20:36.759091 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0af42303-cf8f-4774-82a6-cdc0818f976c-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"0af42303-cf8f-4774-82a6-cdc0818f976c\") " pod="openstack/cinder-api-0" Oct 01 15:20:36 crc kubenswrapper[4869]: I1001 15:20:36.779615 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0af42303-cf8f-4774-82a6-cdc0818f976c-public-tls-certs\") pod \"cinder-api-0\" (UID: \"0af42303-cf8f-4774-82a6-cdc0818f976c\") " pod="openstack/cinder-api-0" Oct 01 15:20:36 crc kubenswrapper[4869]: I1001 15:20:36.779952 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0af42303-cf8f-4774-82a6-cdc0818f976c-etc-machine-id\") pod \"cinder-api-0\" (UID: \"0af42303-cf8f-4774-82a6-cdc0818f976c\") " pod="openstack/cinder-api-0" Oct 01 15:20:36 crc kubenswrapper[4869]: I1001 15:20:36.780450 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0af42303-cf8f-4774-82a6-cdc0818f976c-logs\") pod \"cinder-api-0\" (UID: \"0af42303-cf8f-4774-82a6-cdc0818f976c\") " pod="openstack/cinder-api-0" Oct 01 15:20:36 crc kubenswrapper[4869]: I1001 15:20:36.781030 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0af42303-cf8f-4774-82a6-cdc0818f976c-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"0af42303-cf8f-4774-82a6-cdc0818f976c\") " pod="openstack/cinder-api-0" Oct 01 15:20:36 crc kubenswrapper[4869]: I1001 15:20:36.796074 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0af42303-cf8f-4774-82a6-cdc0818f976c-scripts\") pod \"cinder-api-0\" (UID: \"0af42303-cf8f-4774-82a6-cdc0818f976c\") " pod="openstack/cinder-api-0" Oct 01 15:20:36 crc kubenswrapper[4869]: I1001 15:20:36.796167 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0af42303-cf8f-4774-82a6-cdc0818f976c-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"0af42303-cf8f-4774-82a6-cdc0818f976c\") " pod="openstack/cinder-api-0" Oct 01 15:20:36 crc kubenswrapper[4869]: I1001 15:20:36.796408 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0af42303-cf8f-4774-82a6-cdc0818f976c-config-data-custom\") pod \"cinder-api-0\" (UID: \"0af42303-cf8f-4774-82a6-cdc0818f976c\") " 
pod="openstack/cinder-api-0" Oct 01 15:20:36 crc kubenswrapper[4869]: I1001 15:20:36.801626 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0af42303-cf8f-4774-82a6-cdc0818f976c-config-data\") pod \"cinder-api-0\" (UID: \"0af42303-cf8f-4774-82a6-cdc0818f976c\") " pod="openstack/cinder-api-0" Oct 01 15:20:36 crc kubenswrapper[4869]: I1001 15:20:36.810377 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Oct 01 15:20:36 crc kubenswrapper[4869]: I1001 15:20:36.813780 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hsr99\" (UniqueName: \"kubernetes.io/projected/0af42303-cf8f-4774-82a6-cdc0818f976c-kube-api-access-hsr99\") pod \"cinder-api-0\" (UID: \"0af42303-cf8f-4774-82a6-cdc0818f976c\") " pod="openstack/cinder-api-0" Oct 01 15:20:36 crc kubenswrapper[4869]: I1001 15:20:36.944445 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-9fzlp" Oct 01 15:20:36 crc kubenswrapper[4869]: I1001 15:20:36.963385 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/560e7cea-ec57-4f2c-b0d3-9ee5ded6f37a-config\") pod \"560e7cea-ec57-4f2c-b0d3-9ee5ded6f37a\" (UID: \"560e7cea-ec57-4f2c-b0d3-9ee5ded6f37a\") " Oct 01 15:20:36 crc kubenswrapper[4869]: I1001 15:20:36.963516 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gv4l7\" (UniqueName: \"kubernetes.io/projected/560e7cea-ec57-4f2c-b0d3-9ee5ded6f37a-kube-api-access-gv4l7\") pod \"560e7cea-ec57-4f2c-b0d3-9ee5ded6f37a\" (UID: \"560e7cea-ec57-4f2c-b0d3-9ee5ded6f37a\") " Oct 01 15:20:36 crc kubenswrapper[4869]: I1001 15:20:36.963546 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/560e7cea-ec57-4f2c-b0d3-9ee5ded6f37a-combined-ca-bundle\") pod \"560e7cea-ec57-4f2c-b0d3-9ee5ded6f37a\" (UID: \"560e7cea-ec57-4f2c-b0d3-9ee5ded6f37a\") " Oct 01 15:20:36 crc kubenswrapper[4869]: I1001 15:20:36.968450 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/560e7cea-ec57-4f2c-b0d3-9ee5ded6f37a-kube-api-access-gv4l7" (OuterVolumeSpecName: "kube-api-access-gv4l7") pod "560e7cea-ec57-4f2c-b0d3-9ee5ded6f37a" (UID: "560e7cea-ec57-4f2c-b0d3-9ee5ded6f37a"). InnerVolumeSpecName "kube-api-access-gv4l7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:20:36 crc kubenswrapper[4869]: I1001 15:20:36.984710 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Oct 01 15:20:37 crc kubenswrapper[4869]: I1001 15:20:37.022420 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/560e7cea-ec57-4f2c-b0d3-9ee5ded6f37a-config" (OuterVolumeSpecName: "config") pod "560e7cea-ec57-4f2c-b0d3-9ee5ded6f37a" (UID: "560e7cea-ec57-4f2c-b0d3-9ee5ded6f37a"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:20:37 crc kubenswrapper[4869]: I1001 15:20:37.027433 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/560e7cea-ec57-4f2c-b0d3-9ee5ded6f37a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "560e7cea-ec57-4f2c-b0d3-9ee5ded6f37a" (UID: "560e7cea-ec57-4f2c-b0d3-9ee5ded6f37a"). 
InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:20:37 crc kubenswrapper[4869]: I1001 15:20:37.050193 4869 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-64dff96bf7-6fjfz" podUID="11b54042-d5f6-43e4-92c7-560bc7f55f33" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.149:5353: i/o timeout" Oct 01 15:20:37 crc kubenswrapper[4869]: I1001 15:20:37.066099 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gv4l7\" (UniqueName: \"kubernetes.io/projected/560e7cea-ec57-4f2c-b0d3-9ee5ded6f37a-kube-api-access-gv4l7\") on node \"crc\" DevicePath \"\"" Oct 01 15:20:37 crc kubenswrapper[4869]: I1001 15:20:37.066125 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/560e7cea-ec57-4f2c-b0d3-9ee5ded6f37a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 15:20:37 crc kubenswrapper[4869]: I1001 15:20:37.066135 4869 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/560e7cea-ec57-4f2c-b0d3-9ee5ded6f37a-config\") on node \"crc\" DevicePath \"\"" Oct 01 15:20:37 crc kubenswrapper[4869]: I1001 15:20:37.431306 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Oct 01 15:20:37 crc kubenswrapper[4869]: W1001 15:20:37.435375 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0af42303_cf8f_4774_82a6_cdc0818f976c.slice/crio-00a003be69947b8b47cf521b35ab5e4889977433c321275428e0314963064ce5 WatchSource:0}: Error finding container 00a003be69947b8b47cf521b35ab5e4889977433c321275428e0314963064ce5: Status 404 returned error can't find the container with id 00a003be69947b8b47cf521b35ab5e4889977433c321275428e0314963064ce5 Oct 01 15:20:37 crc kubenswrapper[4869]: I1001 15:20:37.516607 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-9fzlp" event={"ID":"560e7cea-ec57-4f2c-b0d3-9ee5ded6f37a","Type":"ContainerDied","Data":"ae665cc1791dd6c7102f5680d16b3a30021f40d60c9f7e20585f04fddd336163"} Oct 01 15:20:37 crc kubenswrapper[4869]: I1001 15:20:37.516895 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ae665cc1791dd6c7102f5680d16b3a30021f40d60c9f7e20585f04fddd336163" Oct 01 15:20:37 crc kubenswrapper[4869]: I1001 15:20:37.516663 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-9fzlp" Oct 01 15:20:37 crc kubenswrapper[4869]: I1001 15:20:37.524683 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"0af42303-cf8f-4774-82a6-cdc0818f976c","Type":"ContainerStarted","Data":"00a003be69947b8b47cf521b35ab5e4889977433c321275428e0314963064ce5"} Oct 01 15:20:37 crc kubenswrapper[4869]: I1001 15:20:37.627670 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="923e59ee-478b-4a7f-b7ab-4a4cff74c301" path="/var/lib/kubelet/pods/923e59ee-478b-4a7f-b7ab-4a4cff74c301/volumes" Oct 01 15:20:37 crc kubenswrapper[4869]: I1001 15:20:37.662987 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-858794c669-gjc6c"] Oct 01 15:20:37 crc kubenswrapper[4869]: I1001 15:20:37.663337 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-858794c669-gjc6c" podUID="31ce96c3-bdba-442d-b7f8-dee4694e0e35" containerName="dnsmasq-dns" containerID="cri-o://5cb41dae4bf91d71c27d7756ccbfc867913ef107135985c1bbac39787d97af52" gracePeriod=10 Oct 01 15:20:37 crc kubenswrapper[4869]: I1001 15:20:37.750480 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-f548b88b9-jqj9f"] Oct 01 15:20:37 crc kubenswrapper[4869]: E1001 15:20:37.751765 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="560e7cea-ec57-4f2c-b0d3-9ee5ded6f37a" containerName="neutron-db-sync" Oct 01 15:20:37 crc kubenswrapper[4869]: I1001 15:20:37.751842 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="560e7cea-ec57-4f2c-b0d3-9ee5ded6f37a" containerName="neutron-db-sync" Oct 01 15:20:37 crc kubenswrapper[4869]: I1001 15:20:37.752444 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="560e7cea-ec57-4f2c-b0d3-9ee5ded6f37a" containerName="neutron-db-sync" Oct 01 15:20:37 crc kubenswrapper[4869]: I1001 15:20:37.754867 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-f548b88b9-jqj9f" Oct 01 15:20:37 crc kubenswrapper[4869]: I1001 15:20:37.776032 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-f548b88b9-jqj9f"] Oct 01 15:20:37 crc kubenswrapper[4869]: I1001 15:20:37.788322 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8fdb029f-29fa-47b7-9899-1fd9f14fb383-ovsdbserver-sb\") pod \"dnsmasq-dns-f548b88b9-jqj9f\" (UID: \"8fdb029f-29fa-47b7-9899-1fd9f14fb383\") " pod="openstack/dnsmasq-dns-f548b88b9-jqj9f" Oct 01 15:20:37 crc kubenswrapper[4869]: I1001 15:20:37.788391 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8fdb029f-29fa-47b7-9899-1fd9f14fb383-dns-svc\") pod \"dnsmasq-dns-f548b88b9-jqj9f\" (UID: \"8fdb029f-29fa-47b7-9899-1fd9f14fb383\") " pod="openstack/dnsmasq-dns-f548b88b9-jqj9f" Oct 01 15:20:37 crc kubenswrapper[4869]: I1001 15:20:37.788418 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x64zb\" (UniqueName: \"kubernetes.io/projected/8fdb029f-29fa-47b7-9899-1fd9f14fb383-kube-api-access-x64zb\") pod \"dnsmasq-dns-f548b88b9-jqj9f\" (UID: \"8fdb029f-29fa-47b7-9899-1fd9f14fb383\") " pod="openstack/dnsmasq-dns-f548b88b9-jqj9f" Oct 01 15:20:37 crc kubenswrapper[4869]: I1001 15:20:37.788496 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8fdb029f-29fa-47b7-9899-1fd9f14fb383-ovsdbserver-nb\") pod \"dnsmasq-dns-f548b88b9-jqj9f\" (UID: \"8fdb029f-29fa-47b7-9899-1fd9f14fb383\") " pod="openstack/dnsmasq-dns-f548b88b9-jqj9f" Oct 01 15:20:37 crc kubenswrapper[4869]: I1001 15:20:37.788547 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8fdb029f-29fa-47b7-9899-1fd9f14fb383-config\") pod \"dnsmasq-dns-f548b88b9-jqj9f\" (UID: \"8fdb029f-29fa-47b7-9899-1fd9f14fb383\") " pod="openstack/dnsmasq-dns-f548b88b9-jqj9f" Oct 01 15:20:37 crc kubenswrapper[4869]: I1001 15:20:37.892297 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8fdb029f-29fa-47b7-9899-1fd9f14fb383-config\") pod \"dnsmasq-dns-f548b88b9-jqj9f\" (UID: \"8fdb029f-29fa-47b7-9899-1fd9f14fb383\") " pod="openstack/dnsmasq-dns-f548b88b9-jqj9f" Oct 01 15:20:37 crc kubenswrapper[4869]: I1001 15:20:37.892364 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8fdb029f-29fa-47b7-9899-1fd9f14fb383-ovsdbserver-sb\") pod \"dnsmasq-dns-f548b88b9-jqj9f\" (UID: \"8fdb029f-29fa-47b7-9899-1fd9f14fb383\") " pod="openstack/dnsmasq-dns-f548b88b9-jqj9f" Oct 01 15:20:37 crc kubenswrapper[4869]: I1001 15:20:37.892403 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8fdb029f-29fa-47b7-9899-1fd9f14fb383-dns-svc\") pod \"dnsmasq-dns-f548b88b9-jqj9f\" (UID: \"8fdb029f-29fa-47b7-9899-1fd9f14fb383\") " pod="openstack/dnsmasq-dns-f548b88b9-jqj9f" Oct 01 15:20:37 crc kubenswrapper[4869]: I1001 15:20:37.892421 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x64zb\" 
(UniqueName: \"kubernetes.io/projected/8fdb029f-29fa-47b7-9899-1fd9f14fb383-kube-api-access-x64zb\") pod \"dnsmasq-dns-f548b88b9-jqj9f\" (UID: \"8fdb029f-29fa-47b7-9899-1fd9f14fb383\") " pod="openstack/dnsmasq-dns-f548b88b9-jqj9f" Oct 01 15:20:37 crc kubenswrapper[4869]: I1001 15:20:37.892493 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8fdb029f-29fa-47b7-9899-1fd9f14fb383-ovsdbserver-nb\") pod \"dnsmasq-dns-f548b88b9-jqj9f\" (UID: \"8fdb029f-29fa-47b7-9899-1fd9f14fb383\") " pod="openstack/dnsmasq-dns-f548b88b9-jqj9f" Oct 01 15:20:37 crc kubenswrapper[4869]: I1001 15:20:37.893333 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8fdb029f-29fa-47b7-9899-1fd9f14fb383-ovsdbserver-nb\") pod \"dnsmasq-dns-f548b88b9-jqj9f\" (UID: \"8fdb029f-29fa-47b7-9899-1fd9f14fb383\") " pod="openstack/dnsmasq-dns-f548b88b9-jqj9f" Oct 01 15:20:37 crc kubenswrapper[4869]: I1001 15:20:37.893782 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8fdb029f-29fa-47b7-9899-1fd9f14fb383-ovsdbserver-sb\") pod \"dnsmasq-dns-f548b88b9-jqj9f\" (UID: \"8fdb029f-29fa-47b7-9899-1fd9f14fb383\") " pod="openstack/dnsmasq-dns-f548b88b9-jqj9f" Oct 01 15:20:37 crc kubenswrapper[4869]: I1001 15:20:37.893893 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8fdb029f-29fa-47b7-9899-1fd9f14fb383-dns-svc\") pod \"dnsmasq-dns-f548b88b9-jqj9f\" (UID: \"8fdb029f-29fa-47b7-9899-1fd9f14fb383\") " pod="openstack/dnsmasq-dns-f548b88b9-jqj9f" Oct 01 15:20:37 crc kubenswrapper[4869]: I1001 15:20:37.895293 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8fdb029f-29fa-47b7-9899-1fd9f14fb383-config\") pod \"dnsmasq-dns-f548b88b9-jqj9f\" (UID: \"8fdb029f-29fa-47b7-9899-1fd9f14fb383\") " pod="openstack/dnsmasq-dns-f548b88b9-jqj9f" Oct 01 15:20:37 crc kubenswrapper[4869]: I1001 15:20:37.945473 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-6c8b49859b-vzzr7"] Oct 01 15:20:37 crc kubenswrapper[4869]: I1001 15:20:37.953919 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-6c8b49859b-vzzr7" Oct 01 15:20:37 crc kubenswrapper[4869]: I1001 15:20:37.957033 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Oct 01 15:20:37 crc kubenswrapper[4869]: I1001 15:20:37.957404 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-dgxt5" Oct 01 15:20:37 crc kubenswrapper[4869]: I1001 15:20:37.961600 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Oct 01 15:20:37 crc kubenswrapper[4869]: I1001 15:20:37.965548 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x64zb\" (UniqueName: \"kubernetes.io/projected/8fdb029f-29fa-47b7-9899-1fd9f14fb383-kube-api-access-x64zb\") pod \"dnsmasq-dns-f548b88b9-jqj9f\" (UID: \"8fdb029f-29fa-47b7-9899-1fd9f14fb383\") " pod="openstack/dnsmasq-dns-f548b88b9-jqj9f" Oct 01 15:20:37 crc kubenswrapper[4869]: I1001 15:20:37.986446 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-ovndbs" Oct 01 15:20:37 crc kubenswrapper[4869]: I1001 15:20:37.999720 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-6c8b49859b-vzzr7"] Oct 01 15:20:38 crc kubenswrapper[4869]: I1001 15:20:38.101413 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91b925da-fe68-4787-8a77-1f49f04cd917-combined-ca-bundle\") pod \"neutron-6c8b49859b-vzzr7\" (UID: \"91b925da-fe68-4787-8a77-1f49f04cd917\") " pod="openstack/neutron-6c8b49859b-vzzr7" Oct 01 15:20:38 crc kubenswrapper[4869]: I1001 15:20:38.101501 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/91b925da-fe68-4787-8a77-1f49f04cd917-config\") pod \"neutron-6c8b49859b-vzzr7\" (UID: \"91b925da-fe68-4787-8a77-1f49f04cd917\") " pod="openstack/neutron-6c8b49859b-vzzr7" Oct 01 15:20:38 crc kubenswrapper[4869]: I1001 15:20:38.101555 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/91b925da-fe68-4787-8a77-1f49f04cd917-ovndb-tls-certs\") pod \"neutron-6c8b49859b-vzzr7\" (UID: \"91b925da-fe68-4787-8a77-1f49f04cd917\") " pod="openstack/neutron-6c8b49859b-vzzr7" Oct 01 15:20:38 crc kubenswrapper[4869]: I1001 15:20:38.101770 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/91b925da-fe68-4787-8a77-1f49f04cd917-httpd-config\") pod \"neutron-6c8b49859b-vzzr7\" (UID: \"91b925da-fe68-4787-8a77-1f49f04cd917\") " pod="openstack/neutron-6c8b49859b-vzzr7" Oct 01 15:20:38 crc kubenswrapper[4869]: I1001 15:20:38.101803 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k6xrh\" (UniqueName: \"kubernetes.io/projected/91b925da-fe68-4787-8a77-1f49f04cd917-kube-api-access-k6xrh\") pod \"neutron-6c8b49859b-vzzr7\" (UID: \"91b925da-fe68-4787-8a77-1f49f04cd917\") " pod="openstack/neutron-6c8b49859b-vzzr7" Oct 01 15:20:38 crc kubenswrapper[4869]: I1001 15:20:38.174258 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-f548b88b9-jqj9f" Oct 01 15:20:38 crc kubenswrapper[4869]: I1001 15:20:38.204838 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/91b925da-fe68-4787-8a77-1f49f04cd917-httpd-config\") pod \"neutron-6c8b49859b-vzzr7\" (UID: \"91b925da-fe68-4787-8a77-1f49f04cd917\") " pod="openstack/neutron-6c8b49859b-vzzr7" Oct 01 15:20:38 crc kubenswrapper[4869]: I1001 15:20:38.204898 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k6xrh\" (UniqueName: \"kubernetes.io/projected/91b925da-fe68-4787-8a77-1f49f04cd917-kube-api-access-k6xrh\") pod \"neutron-6c8b49859b-vzzr7\" (UID: \"91b925da-fe68-4787-8a77-1f49f04cd917\") " pod="openstack/neutron-6c8b49859b-vzzr7" Oct 01 15:20:38 crc kubenswrapper[4869]: I1001 15:20:38.204975 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91b925da-fe68-4787-8a77-1f49f04cd917-combined-ca-bundle\") pod \"neutron-6c8b49859b-vzzr7\" (UID: \"91b925da-fe68-4787-8a77-1f49f04cd917\") " pod="openstack/neutron-6c8b49859b-vzzr7" Oct 01 15:20:38 crc kubenswrapper[4869]: I1001 15:20:38.205019 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/91b925da-fe68-4787-8a77-1f49f04cd917-config\") pod \"neutron-6c8b49859b-vzzr7\" (UID: \"91b925da-fe68-4787-8a77-1f49f04cd917\") " pod="openstack/neutron-6c8b49859b-vzzr7" Oct 01 15:20:38 crc kubenswrapper[4869]: I1001 15:20:38.205045 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/91b925da-fe68-4787-8a77-1f49f04cd917-ovndb-tls-certs\") pod \"neutron-6c8b49859b-vzzr7\" (UID: \"91b925da-fe68-4787-8a77-1f49f04cd917\") " pod="openstack/neutron-6c8b49859b-vzzr7" Oct 01 15:20:38 crc kubenswrapper[4869]: I1001 15:20:38.210316 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/91b925da-fe68-4787-8a77-1f49f04cd917-ovndb-tls-certs\") pod \"neutron-6c8b49859b-vzzr7\" (UID: \"91b925da-fe68-4787-8a77-1f49f04cd917\") " pod="openstack/neutron-6c8b49859b-vzzr7" Oct 01 15:20:38 crc kubenswrapper[4869]: I1001 15:20:38.212239 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91b925da-fe68-4787-8a77-1f49f04cd917-combined-ca-bundle\") pod \"neutron-6c8b49859b-vzzr7\" (UID: \"91b925da-fe68-4787-8a77-1f49f04cd917\") " pod="openstack/neutron-6c8b49859b-vzzr7" Oct 01 15:20:38 crc kubenswrapper[4869]: I1001 15:20:38.213132 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/91b925da-fe68-4787-8a77-1f49f04cd917-config\") pod \"neutron-6c8b49859b-vzzr7\" (UID: \"91b925da-fe68-4787-8a77-1f49f04cd917\") " pod="openstack/neutron-6c8b49859b-vzzr7" Oct 01 15:20:38 crc kubenswrapper[4869]: I1001 15:20:38.224941 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/91b925da-fe68-4787-8a77-1f49f04cd917-httpd-config\") pod \"neutron-6c8b49859b-vzzr7\" (UID: \"91b925da-fe68-4787-8a77-1f49f04cd917\") " pod="openstack/neutron-6c8b49859b-vzzr7" Oct 01 15:20:38 crc kubenswrapper[4869]: I1001 15:20:38.225704 4869 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-k6xrh\" (UniqueName: \"kubernetes.io/projected/91b925da-fe68-4787-8a77-1f49f04cd917-kube-api-access-k6xrh\") pod \"neutron-6c8b49859b-vzzr7\" (UID: \"91b925da-fe68-4787-8a77-1f49f04cd917\") " pod="openstack/neutron-6c8b49859b-vzzr7" Oct 01 15:20:38 crc kubenswrapper[4869]: I1001 15:20:38.317064 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-6c8b49859b-vzzr7" Oct 01 15:20:38 crc kubenswrapper[4869]: I1001 15:20:38.359042 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-858794c669-gjc6c" Oct 01 15:20:38 crc kubenswrapper[4869]: I1001 15:20:38.415062 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/31ce96c3-bdba-442d-b7f8-dee4694e0e35-ovsdbserver-sb\") pod \"31ce96c3-bdba-442d-b7f8-dee4694e0e35\" (UID: \"31ce96c3-bdba-442d-b7f8-dee4694e0e35\") " Oct 01 15:20:38 crc kubenswrapper[4869]: I1001 15:20:38.415312 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/31ce96c3-bdba-442d-b7f8-dee4694e0e35-ovsdbserver-nb\") pod \"31ce96c3-bdba-442d-b7f8-dee4694e0e35\" (UID: \"31ce96c3-bdba-442d-b7f8-dee4694e0e35\") " Oct 01 15:20:38 crc kubenswrapper[4869]: I1001 15:20:38.415344 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/31ce96c3-bdba-442d-b7f8-dee4694e0e35-dns-svc\") pod \"31ce96c3-bdba-442d-b7f8-dee4694e0e35\" (UID: \"31ce96c3-bdba-442d-b7f8-dee4694e0e35\") " Oct 01 15:20:38 crc kubenswrapper[4869]: I1001 15:20:38.415374 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dfxs9\" (UniqueName: \"kubernetes.io/projected/31ce96c3-bdba-442d-b7f8-dee4694e0e35-kube-api-access-dfxs9\") pod \"31ce96c3-bdba-442d-b7f8-dee4694e0e35\" (UID: \"31ce96c3-bdba-442d-b7f8-dee4694e0e35\") " Oct 01 15:20:38 crc kubenswrapper[4869]: I1001 15:20:38.415460 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/31ce96c3-bdba-442d-b7f8-dee4694e0e35-config\") pod \"31ce96c3-bdba-442d-b7f8-dee4694e0e35\" (UID: \"31ce96c3-bdba-442d-b7f8-dee4694e0e35\") " Oct 01 15:20:38 crc kubenswrapper[4869]: I1001 15:20:38.425016 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31ce96c3-bdba-442d-b7f8-dee4694e0e35-kube-api-access-dfxs9" (OuterVolumeSpecName: "kube-api-access-dfxs9") pod "31ce96c3-bdba-442d-b7f8-dee4694e0e35" (UID: "31ce96c3-bdba-442d-b7f8-dee4694e0e35"). InnerVolumeSpecName "kube-api-access-dfxs9". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:20:38 crc kubenswrapper[4869]: I1001 15:20:38.477592 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31ce96c3-bdba-442d-b7f8-dee4694e0e35-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "31ce96c3-bdba-442d-b7f8-dee4694e0e35" (UID: "31ce96c3-bdba-442d-b7f8-dee4694e0e35"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:20:38 crc kubenswrapper[4869]: I1001 15:20:38.477613 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31ce96c3-bdba-442d-b7f8-dee4694e0e35-config" (OuterVolumeSpecName: "config") pod "31ce96c3-bdba-442d-b7f8-dee4694e0e35" (UID: "31ce96c3-bdba-442d-b7f8-dee4694e0e35"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:20:38 crc kubenswrapper[4869]: I1001 15:20:38.494191 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31ce96c3-bdba-442d-b7f8-dee4694e0e35-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "31ce96c3-bdba-442d-b7f8-dee4694e0e35" (UID: "31ce96c3-bdba-442d-b7f8-dee4694e0e35"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:20:38 crc kubenswrapper[4869]: I1001 15:20:38.503827 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31ce96c3-bdba-442d-b7f8-dee4694e0e35-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "31ce96c3-bdba-442d-b7f8-dee4694e0e35" (UID: "31ce96c3-bdba-442d-b7f8-dee4694e0e35"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:20:38 crc kubenswrapper[4869]: I1001 15:20:38.518007 4869 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/31ce96c3-bdba-442d-b7f8-dee4694e0e35-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 01 15:20:38 crc kubenswrapper[4869]: I1001 15:20:38.518096 4869 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/31ce96c3-bdba-442d-b7f8-dee4694e0e35-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 01 15:20:38 crc kubenswrapper[4869]: I1001 15:20:38.518106 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dfxs9\" (UniqueName: \"kubernetes.io/projected/31ce96c3-bdba-442d-b7f8-dee4694e0e35-kube-api-access-dfxs9\") on node \"crc\" DevicePath \"\"" Oct 01 15:20:38 crc kubenswrapper[4869]: I1001 15:20:38.518148 4869 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/31ce96c3-bdba-442d-b7f8-dee4694e0e35-config\") on node \"crc\" DevicePath \"\"" Oct 01 15:20:38 crc kubenswrapper[4869]: I1001 15:20:38.518158 4869 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/31ce96c3-bdba-442d-b7f8-dee4694e0e35-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 01 15:20:38 crc kubenswrapper[4869]: I1001 15:20:38.549017 4869 generic.go:334] "Generic (PLEG): container finished" podID="31ce96c3-bdba-442d-b7f8-dee4694e0e35" containerID="5cb41dae4bf91d71c27d7756ccbfc867913ef107135985c1bbac39787d97af52" exitCode=0 Oct 01 15:20:38 crc kubenswrapper[4869]: I1001 15:20:38.549078 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-858794c669-gjc6c" event={"ID":"31ce96c3-bdba-442d-b7f8-dee4694e0e35","Type":"ContainerDied","Data":"5cb41dae4bf91d71c27d7756ccbfc867913ef107135985c1bbac39787d97af52"} Oct 01 15:20:38 crc kubenswrapper[4869]: I1001 15:20:38.549102 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-858794c669-gjc6c" event={"ID":"31ce96c3-bdba-442d-b7f8-dee4694e0e35","Type":"ContainerDied","Data":"62eb0b0bd2dfebaa9832aac6456e3bbd8f2185e7f3a3d01e2f04cfa11765412b"} Oct 01 
15:20:38 crc kubenswrapper[4869]: I1001 15:20:38.549118 4869 scope.go:117] "RemoveContainer" containerID="5cb41dae4bf91d71c27d7756ccbfc867913ef107135985c1bbac39787d97af52" Oct 01 15:20:38 crc kubenswrapper[4869]: I1001 15:20:38.549235 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-858794c669-gjc6c" Oct 01 15:20:38 crc kubenswrapper[4869]: I1001 15:20:38.557019 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"0af42303-cf8f-4774-82a6-cdc0818f976c","Type":"ContainerStarted","Data":"fe539c12186c79470577dda00ec4054fbc36b99d823c2c37e3c65e1be2324afd"} Oct 01 15:20:38 crc kubenswrapper[4869]: I1001 15:20:38.580124 4869 scope.go:117] "RemoveContainer" containerID="fcb635a34ca72a5f99e79e298d63d2007579870aaa49899a1db6e8f062d5c00a" Oct 01 15:20:38 crc kubenswrapper[4869]: I1001 15:20:38.594564 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-858794c669-gjc6c"] Oct 01 15:20:38 crc kubenswrapper[4869]: I1001 15:20:38.610506 4869 scope.go:117] "RemoveContainer" containerID="5cb41dae4bf91d71c27d7756ccbfc867913ef107135985c1bbac39787d97af52" Oct 01 15:20:38 crc kubenswrapper[4869]: E1001 15:20:38.612384 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5cb41dae4bf91d71c27d7756ccbfc867913ef107135985c1bbac39787d97af52\": container with ID starting with 5cb41dae4bf91d71c27d7756ccbfc867913ef107135985c1bbac39787d97af52 not found: ID does not exist" containerID="5cb41dae4bf91d71c27d7756ccbfc867913ef107135985c1bbac39787d97af52" Oct 01 15:20:38 crc kubenswrapper[4869]: I1001 15:20:38.612439 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5cb41dae4bf91d71c27d7756ccbfc867913ef107135985c1bbac39787d97af52"} err="failed to get container status \"5cb41dae4bf91d71c27d7756ccbfc867913ef107135985c1bbac39787d97af52\": rpc error: code = NotFound desc = could not find container \"5cb41dae4bf91d71c27d7756ccbfc867913ef107135985c1bbac39787d97af52\": container with ID starting with 5cb41dae4bf91d71c27d7756ccbfc867913ef107135985c1bbac39787d97af52 not found: ID does not exist" Oct 01 15:20:38 crc kubenswrapper[4869]: I1001 15:20:38.612464 4869 scope.go:117] "RemoveContainer" containerID="fcb635a34ca72a5f99e79e298d63d2007579870aaa49899a1db6e8f062d5c00a" Oct 01 15:20:38 crc kubenswrapper[4869]: E1001 15:20:38.615448 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fcb635a34ca72a5f99e79e298d63d2007579870aaa49899a1db6e8f062d5c00a\": container with ID starting with fcb635a34ca72a5f99e79e298d63d2007579870aaa49899a1db6e8f062d5c00a not found: ID does not exist" containerID="fcb635a34ca72a5f99e79e298d63d2007579870aaa49899a1db6e8f062d5c00a" Oct 01 15:20:38 crc kubenswrapper[4869]: I1001 15:20:38.615477 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fcb635a34ca72a5f99e79e298d63d2007579870aaa49899a1db6e8f062d5c00a"} err="failed to get container status \"fcb635a34ca72a5f99e79e298d63d2007579870aaa49899a1db6e8f062d5c00a\": rpc error: code = NotFound desc = could not find container \"fcb635a34ca72a5f99e79e298d63d2007579870aaa49899a1db6e8f062d5c00a\": container with ID starting with fcb635a34ca72a5f99e79e298d63d2007579870aaa49899a1db6e8f062d5c00a not found: ID does not exist" Oct 01 15:20:38 crc kubenswrapper[4869]: I1001 15:20:38.643576 4869 
kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-858794c669-gjc6c"] Oct 01 15:20:38 crc kubenswrapper[4869]: I1001 15:20:38.704537 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-f548b88b9-jqj9f"] Oct 01 15:20:38 crc kubenswrapper[4869]: I1001 15:20:38.931438 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-6c8b49859b-vzzr7"] Oct 01 15:20:38 crc kubenswrapper[4869]: W1001 15:20:38.976906 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod91b925da_fe68_4787_8a77_1f49f04cd917.slice/crio-3268cdb704aa8139554398ad54303af6a62c92b9bcd503a021deb1fc49fed0bb WatchSource:0}: Error finding container 3268cdb704aa8139554398ad54303af6a62c92b9bcd503a021deb1fc49fed0bb: Status 404 returned error can't find the container with id 3268cdb704aa8139554398ad54303af6a62c92b9bcd503a021deb1fc49fed0bb Oct 01 15:20:39 crc kubenswrapper[4869]: I1001 15:20:39.200590 4869 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-6647d584b4-vsdfk" podUID="a59f97ca-2d70-422f-ae93-81ec595b43aa" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.150:9311/healthcheck\": read tcp 10.217.0.2:41104->10.217.0.150:9311: read: connection reset by peer" Oct 01 15:20:39 crc kubenswrapper[4869]: I1001 15:20:39.200592 4869 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-6647d584b4-vsdfk" podUID="a59f97ca-2d70-422f-ae93-81ec595b43aa" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.150:9311/healthcheck\": read tcp 10.217.0.2:41116->10.217.0.150:9311: read: connection reset by peer" Oct 01 15:20:39 crc kubenswrapper[4869]: I1001 15:20:39.565777 4869 generic.go:334] "Generic (PLEG): container finished" podID="a59f97ca-2d70-422f-ae93-81ec595b43aa" containerID="443ba4fc8389996332bbb96dcf8220fe8bbf57edce9d0d1214f191d9aa34cf6e" exitCode=0 Oct 01 15:20:39 crc kubenswrapper[4869]: I1001 15:20:39.565835 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6647d584b4-vsdfk" event={"ID":"a59f97ca-2d70-422f-ae93-81ec595b43aa","Type":"ContainerDied","Data":"443ba4fc8389996332bbb96dcf8220fe8bbf57edce9d0d1214f191d9aa34cf6e"} Oct 01 15:20:39 crc kubenswrapper[4869]: I1001 15:20:39.568681 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6c8b49859b-vzzr7" event={"ID":"91b925da-fe68-4787-8a77-1f49f04cd917","Type":"ContainerStarted","Data":"ee76674fc7efa494d4f99f5af61cc48f47f33b2bb6125efa8b907f758d65fd24"} Oct 01 15:20:39 crc kubenswrapper[4869]: I1001 15:20:39.568714 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6c8b49859b-vzzr7" event={"ID":"91b925da-fe68-4787-8a77-1f49f04cd917","Type":"ContainerStarted","Data":"54eeba3e2bdb10d9d0dd2c62f76c5e26affc83398074893c9ee7cf58bd30f5fe"} Oct 01 15:20:39 crc kubenswrapper[4869]: I1001 15:20:39.568726 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6c8b49859b-vzzr7" event={"ID":"91b925da-fe68-4787-8a77-1f49f04cd917","Type":"ContainerStarted","Data":"3268cdb704aa8139554398ad54303af6a62c92b9bcd503a021deb1fc49fed0bb"} Oct 01 15:20:39 crc kubenswrapper[4869]: I1001 15:20:39.569781 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-6c8b49859b-vzzr7" Oct 01 15:20:39 crc kubenswrapper[4869]: I1001 15:20:39.572714 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/cinder-api-0" event={"ID":"0af42303-cf8f-4774-82a6-cdc0818f976c","Type":"ContainerStarted","Data":"24176edcd78abe23f0895645950f5df29fa8c9ad7f771d9c5d3328edb8e9e6e3"} Oct 01 15:20:39 crc kubenswrapper[4869]: I1001 15:20:39.572833 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Oct 01 15:20:39 crc kubenswrapper[4869]: I1001 15:20:39.582423 4869 generic.go:334] "Generic (PLEG): container finished" podID="8fdb029f-29fa-47b7-9899-1fd9f14fb383" containerID="14fb375b9e35c03786f11ad8329d185cc78f7f8ebb9896b2e665bf299a67ef22" exitCode=0 Oct 01 15:20:39 crc kubenswrapper[4869]: I1001 15:20:39.591498 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-6c8b49859b-vzzr7" podStartSLOduration=2.591480924 podStartE2EDuration="2.591480924s" podCreationTimestamp="2025-10-01 15:20:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:20:39.587250367 +0000 UTC m=+948.734093483" watchObservedRunningTime="2025-10-01 15:20:39.591480924 +0000 UTC m=+948.738324050" Oct 01 15:20:39 crc kubenswrapper[4869]: I1001 15:20:39.629311 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31ce96c3-bdba-442d-b7f8-dee4694e0e35" path="/var/lib/kubelet/pods/31ce96c3-bdba-442d-b7f8-dee4694e0e35/volumes" Oct 01 15:20:39 crc kubenswrapper[4869]: I1001 15:20:39.629988 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-f548b88b9-jqj9f" event={"ID":"8fdb029f-29fa-47b7-9899-1fd9f14fb383","Type":"ContainerDied","Data":"14fb375b9e35c03786f11ad8329d185cc78f7f8ebb9896b2e665bf299a67ef22"} Oct 01 15:20:39 crc kubenswrapper[4869]: I1001 15:20:39.630011 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-f548b88b9-jqj9f" event={"ID":"8fdb029f-29fa-47b7-9899-1fd9f14fb383","Type":"ContainerStarted","Data":"a4f6fd6d18b03449fd6ebce38c353704f7f87350c7bda61d312945cd69244f60"} Oct 01 15:20:39 crc kubenswrapper[4869]: I1001 15:20:39.634528 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=3.634510911 podStartE2EDuration="3.634510911s" podCreationTimestamp="2025-10-01 15:20:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:20:39.626746995 +0000 UTC m=+948.773590111" watchObservedRunningTime="2025-10-01 15:20:39.634510911 +0000 UTC m=+948.781354027" Oct 01 15:20:39 crc kubenswrapper[4869]: I1001 15:20:39.759684 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-6647d584b4-vsdfk" Oct 01 15:20:39 crc kubenswrapper[4869]: I1001 15:20:39.943080 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a59f97ca-2d70-422f-ae93-81ec595b43aa-combined-ca-bundle\") pod \"a59f97ca-2d70-422f-ae93-81ec595b43aa\" (UID: \"a59f97ca-2d70-422f-ae93-81ec595b43aa\") " Oct 01 15:20:39 crc kubenswrapper[4869]: I1001 15:20:39.943177 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-46hj2\" (UniqueName: \"kubernetes.io/projected/a59f97ca-2d70-422f-ae93-81ec595b43aa-kube-api-access-46hj2\") pod \"a59f97ca-2d70-422f-ae93-81ec595b43aa\" (UID: \"a59f97ca-2d70-422f-ae93-81ec595b43aa\") " Oct 01 15:20:39 crc kubenswrapper[4869]: I1001 15:20:39.943339 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a59f97ca-2d70-422f-ae93-81ec595b43aa-logs\") pod \"a59f97ca-2d70-422f-ae93-81ec595b43aa\" (UID: \"a59f97ca-2d70-422f-ae93-81ec595b43aa\") " Oct 01 15:20:39 crc kubenswrapper[4869]: I1001 15:20:39.943406 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a59f97ca-2d70-422f-ae93-81ec595b43aa-config-data\") pod \"a59f97ca-2d70-422f-ae93-81ec595b43aa\" (UID: \"a59f97ca-2d70-422f-ae93-81ec595b43aa\") " Oct 01 15:20:39 crc kubenswrapper[4869]: I1001 15:20:39.943444 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a59f97ca-2d70-422f-ae93-81ec595b43aa-config-data-custom\") pod \"a59f97ca-2d70-422f-ae93-81ec595b43aa\" (UID: \"a59f97ca-2d70-422f-ae93-81ec595b43aa\") " Oct 01 15:20:39 crc kubenswrapper[4869]: I1001 15:20:39.946226 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a59f97ca-2d70-422f-ae93-81ec595b43aa-logs" (OuterVolumeSpecName: "logs") pod "a59f97ca-2d70-422f-ae93-81ec595b43aa" (UID: "a59f97ca-2d70-422f-ae93-81ec595b43aa"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 15:20:39 crc kubenswrapper[4869]: I1001 15:20:39.964836 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a59f97ca-2d70-422f-ae93-81ec595b43aa-kube-api-access-46hj2" (OuterVolumeSpecName: "kube-api-access-46hj2") pod "a59f97ca-2d70-422f-ae93-81ec595b43aa" (UID: "a59f97ca-2d70-422f-ae93-81ec595b43aa"). InnerVolumeSpecName "kube-api-access-46hj2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:20:39 crc kubenswrapper[4869]: I1001 15:20:39.964872 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a59f97ca-2d70-422f-ae93-81ec595b43aa-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "a59f97ca-2d70-422f-ae93-81ec595b43aa" (UID: "a59f97ca-2d70-422f-ae93-81ec595b43aa"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:20:39 crc kubenswrapper[4869]: I1001 15:20:39.981774 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a59f97ca-2d70-422f-ae93-81ec595b43aa-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a59f97ca-2d70-422f-ae93-81ec595b43aa" (UID: "a59f97ca-2d70-422f-ae93-81ec595b43aa"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:20:40 crc kubenswrapper[4869]: I1001 15:20:40.038353 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a59f97ca-2d70-422f-ae93-81ec595b43aa-config-data" (OuterVolumeSpecName: "config-data") pod "a59f97ca-2d70-422f-ae93-81ec595b43aa" (UID: "a59f97ca-2d70-422f-ae93-81ec595b43aa"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:20:40 crc kubenswrapper[4869]: I1001 15:20:40.045461 4869 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a59f97ca-2d70-422f-ae93-81ec595b43aa-logs\") on node \"crc\" DevicePath \"\"" Oct 01 15:20:40 crc kubenswrapper[4869]: I1001 15:20:40.045496 4869 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a59f97ca-2d70-422f-ae93-81ec595b43aa-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 15:20:40 crc kubenswrapper[4869]: I1001 15:20:40.045508 4869 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a59f97ca-2d70-422f-ae93-81ec595b43aa-config-data-custom\") on node \"crc\" DevicePath \"\"" Oct 01 15:20:40 crc kubenswrapper[4869]: I1001 15:20:40.045519 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a59f97ca-2d70-422f-ae93-81ec595b43aa-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 15:20:40 crc kubenswrapper[4869]: I1001 15:20:40.045528 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-46hj2\" (UniqueName: \"kubernetes.io/projected/a59f97ca-2d70-422f-ae93-81ec595b43aa-kube-api-access-46hj2\") on node \"crc\" DevicePath \"\"" Oct 01 15:20:40 crc kubenswrapper[4869]: I1001 15:20:40.228040 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-85845f7997-n9h7g" Oct 01 15:20:40 crc kubenswrapper[4869]: I1001 15:20:40.349012 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b141c51a-44cd-4c2c-be11-6c8b5576a289-scripts\") pod \"b141c51a-44cd-4c2c-be11-6c8b5576a289\" (UID: \"b141c51a-44cd-4c2c-be11-6c8b5576a289\") " Oct 01 15:20:40 crc kubenswrapper[4869]: I1001 15:20:40.349102 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b141c51a-44cd-4c2c-be11-6c8b5576a289-config-data\") pod \"b141c51a-44cd-4c2c-be11-6c8b5576a289\" (UID: \"b141c51a-44cd-4c2c-be11-6c8b5576a289\") " Oct 01 15:20:40 crc kubenswrapper[4869]: I1001 15:20:40.349229 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b141c51a-44cd-4c2c-be11-6c8b5576a289-logs\") pod \"b141c51a-44cd-4c2c-be11-6c8b5576a289\" (UID: \"b141c51a-44cd-4c2c-be11-6c8b5576a289\") " Oct 01 15:20:40 crc kubenswrapper[4869]: I1001 15:20:40.349321 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wnt2w\" (UniqueName: \"kubernetes.io/projected/b141c51a-44cd-4c2c-be11-6c8b5576a289-kube-api-access-wnt2w\") pod \"b141c51a-44cd-4c2c-be11-6c8b5576a289\" (UID: \"b141c51a-44cd-4c2c-be11-6c8b5576a289\") " Oct 01 15:20:40 crc kubenswrapper[4869]: I1001 15:20:40.349432 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/b141c51a-44cd-4c2c-be11-6c8b5576a289-horizon-secret-key\") pod \"b141c51a-44cd-4c2c-be11-6c8b5576a289\" (UID: \"b141c51a-44cd-4c2c-be11-6c8b5576a289\") " Oct 01 15:20:40 crc kubenswrapper[4869]: I1001 15:20:40.349802 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b141c51a-44cd-4c2c-be11-6c8b5576a289-logs" (OuterVolumeSpecName: "logs") pod "b141c51a-44cd-4c2c-be11-6c8b5576a289" (UID: "b141c51a-44cd-4c2c-be11-6c8b5576a289"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 15:20:40 crc kubenswrapper[4869]: I1001 15:20:40.352112 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b141c51a-44cd-4c2c-be11-6c8b5576a289-kube-api-access-wnt2w" (OuterVolumeSpecName: "kube-api-access-wnt2w") pod "b141c51a-44cd-4c2c-be11-6c8b5576a289" (UID: "b141c51a-44cd-4c2c-be11-6c8b5576a289"). InnerVolumeSpecName "kube-api-access-wnt2w". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:20:40 crc kubenswrapper[4869]: I1001 15:20:40.352678 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b141c51a-44cd-4c2c-be11-6c8b5576a289-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "b141c51a-44cd-4c2c-be11-6c8b5576a289" (UID: "b141c51a-44cd-4c2c-be11-6c8b5576a289"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:20:40 crc kubenswrapper[4869]: I1001 15:20:40.369390 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b141c51a-44cd-4c2c-be11-6c8b5576a289-scripts" (OuterVolumeSpecName: "scripts") pod "b141c51a-44cd-4c2c-be11-6c8b5576a289" (UID: "b141c51a-44cd-4c2c-be11-6c8b5576a289"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:20:40 crc kubenswrapper[4869]: I1001 15:20:40.371857 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b141c51a-44cd-4c2c-be11-6c8b5576a289-config-data" (OuterVolumeSpecName: "config-data") pod "b141c51a-44cd-4c2c-be11-6c8b5576a289" (UID: "b141c51a-44cd-4c2c-be11-6c8b5576a289"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:20:40 crc kubenswrapper[4869]: I1001 15:20:40.451558 4869 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b141c51a-44cd-4c2c-be11-6c8b5576a289-logs\") on node \"crc\" DevicePath \"\"" Oct 01 15:20:40 crc kubenswrapper[4869]: I1001 15:20:40.451592 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wnt2w\" (UniqueName: \"kubernetes.io/projected/b141c51a-44cd-4c2c-be11-6c8b5576a289-kube-api-access-wnt2w\") on node \"crc\" DevicePath \"\"" Oct 01 15:20:40 crc kubenswrapper[4869]: I1001 15:20:40.451603 4869 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/b141c51a-44cd-4c2c-be11-6c8b5576a289-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Oct 01 15:20:40 crc kubenswrapper[4869]: I1001 15:20:40.451615 4869 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b141c51a-44cd-4c2c-be11-6c8b5576a289-scripts\") on node \"crc\" DevicePath \"\"" Oct 01 15:20:40 crc kubenswrapper[4869]: I1001 15:20:40.451625 4869 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b141c51a-44cd-4c2c-be11-6c8b5576a289-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 15:20:40 crc kubenswrapper[4869]: I1001 15:20:40.466599 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-96b4fb6d7-g25hj"] Oct 01 15:20:40 crc kubenswrapper[4869]: E1001 15:20:40.466966 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a59f97ca-2d70-422f-ae93-81ec595b43aa" containerName="barbican-api-log" Oct 01 15:20:40 crc kubenswrapper[4869]: I1001 15:20:40.466981 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="a59f97ca-2d70-422f-ae93-81ec595b43aa" containerName="barbican-api-log" Oct 01 15:20:40 crc kubenswrapper[4869]: E1001 15:20:40.466990 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b141c51a-44cd-4c2c-be11-6c8b5576a289" containerName="horizon" Oct 01 15:20:40 crc kubenswrapper[4869]: I1001 15:20:40.466996 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="b141c51a-44cd-4c2c-be11-6c8b5576a289" containerName="horizon" Oct 01 15:20:40 crc kubenswrapper[4869]: E1001 15:20:40.467008 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b141c51a-44cd-4c2c-be11-6c8b5576a289" containerName="horizon-log" Oct 01 15:20:40 crc kubenswrapper[4869]: I1001 15:20:40.467015 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="b141c51a-44cd-4c2c-be11-6c8b5576a289" containerName="horizon-log" Oct 01 15:20:40 crc kubenswrapper[4869]: E1001 15:20:40.467033 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a59f97ca-2d70-422f-ae93-81ec595b43aa" containerName="barbican-api" Oct 01 15:20:40 crc kubenswrapper[4869]: I1001 15:20:40.467038 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="a59f97ca-2d70-422f-ae93-81ec595b43aa" containerName="barbican-api" Oct 01 15:20:40 crc 
kubenswrapper[4869]: E1001 15:20:40.467049 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="31ce96c3-bdba-442d-b7f8-dee4694e0e35" containerName="dnsmasq-dns" Oct 01 15:20:40 crc kubenswrapper[4869]: I1001 15:20:40.467054 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="31ce96c3-bdba-442d-b7f8-dee4694e0e35" containerName="dnsmasq-dns" Oct 01 15:20:40 crc kubenswrapper[4869]: E1001 15:20:40.467063 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="31ce96c3-bdba-442d-b7f8-dee4694e0e35" containerName="init" Oct 01 15:20:40 crc kubenswrapper[4869]: I1001 15:20:40.467069 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="31ce96c3-bdba-442d-b7f8-dee4694e0e35" containerName="init" Oct 01 15:20:40 crc kubenswrapper[4869]: I1001 15:20:40.467223 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="31ce96c3-bdba-442d-b7f8-dee4694e0e35" containerName="dnsmasq-dns" Oct 01 15:20:40 crc kubenswrapper[4869]: I1001 15:20:40.467232 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="a59f97ca-2d70-422f-ae93-81ec595b43aa" containerName="barbican-api-log" Oct 01 15:20:40 crc kubenswrapper[4869]: I1001 15:20:40.467239 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="b141c51a-44cd-4c2c-be11-6c8b5576a289" containerName="horizon-log" Oct 01 15:20:40 crc kubenswrapper[4869]: I1001 15:20:40.467252 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="b141c51a-44cd-4c2c-be11-6c8b5576a289" containerName="horizon" Oct 01 15:20:40 crc kubenswrapper[4869]: I1001 15:20:40.467280 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="a59f97ca-2d70-422f-ae93-81ec595b43aa" containerName="barbican-api" Oct 01 15:20:40 crc kubenswrapper[4869]: I1001 15:20:40.468112 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-96b4fb6d7-g25hj" Oct 01 15:20:40 crc kubenswrapper[4869]: I1001 15:20:40.471983 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-public-svc" Oct 01 15:20:40 crc kubenswrapper[4869]: I1001 15:20:40.472826 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-internal-svc" Oct 01 15:20:40 crc kubenswrapper[4869]: I1001 15:20:40.490807 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-96b4fb6d7-g25hj"] Oct 01 15:20:40 crc kubenswrapper[4869]: I1001 15:20:40.606326 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-f548b88b9-jqj9f" event={"ID":"8fdb029f-29fa-47b7-9899-1fd9f14fb383","Type":"ContainerStarted","Data":"70e0830c58a3ddf1e7d4ee66671f11e4a25d780d6063de9893c2c07ebb04ffc0"} Oct 01 15:20:40 crc kubenswrapper[4869]: I1001 15:20:40.607463 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-f548b88b9-jqj9f" Oct 01 15:20:40 crc kubenswrapper[4869]: I1001 15:20:40.609803 4869 generic.go:334] "Generic (PLEG): container finished" podID="b141c51a-44cd-4c2c-be11-6c8b5576a289" containerID="03a3bc893c21f1cd1e12eb54fbe6056ca2c18b130dbae8c65059fb06dbe819b4" exitCode=137 Oct 01 15:20:40 crc kubenswrapper[4869]: I1001 15:20:40.609836 4869 generic.go:334] "Generic (PLEG): container finished" podID="b141c51a-44cd-4c2c-be11-6c8b5576a289" containerID="5337d199ab90cec38c8fb6bb0815ad515f9dc13f7c13dc9fc845f1f713e29768" exitCode=137 Oct 01 15:20:40 crc kubenswrapper[4869]: I1001 15:20:40.609850 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-85845f7997-n9h7g" event={"ID":"b141c51a-44cd-4c2c-be11-6c8b5576a289","Type":"ContainerDied","Data":"03a3bc893c21f1cd1e12eb54fbe6056ca2c18b130dbae8c65059fb06dbe819b4"} Oct 01 15:20:40 crc kubenswrapper[4869]: I1001 15:20:40.609883 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-85845f7997-n9h7g" event={"ID":"b141c51a-44cd-4c2c-be11-6c8b5576a289","Type":"ContainerDied","Data":"5337d199ab90cec38c8fb6bb0815ad515f9dc13f7c13dc9fc845f1f713e29768"} Oct 01 15:20:40 crc kubenswrapper[4869]: I1001 15:20:40.609893 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-85845f7997-n9h7g" event={"ID":"b141c51a-44cd-4c2c-be11-6c8b5576a289","Type":"ContainerDied","Data":"f0e61d29e31f231496b06bc3f60f48eec18267c36c477af60090db3acc47a4dc"} Oct 01 15:20:40 crc kubenswrapper[4869]: I1001 15:20:40.609838 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-85845f7997-n9h7g" Oct 01 15:20:40 crc kubenswrapper[4869]: I1001 15:20:40.609906 4869 scope.go:117] "RemoveContainer" containerID="03a3bc893c21f1cd1e12eb54fbe6056ca2c18b130dbae8c65059fb06dbe819b4" Oct 01 15:20:40 crc kubenswrapper[4869]: I1001 15:20:40.611938 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6647d584b4-vsdfk" event={"ID":"a59f97ca-2d70-422f-ae93-81ec595b43aa","Type":"ContainerDied","Data":"6573c00e28b352e685cd57276693cd1f5aff349f12947c3cc00136fb400ba3ff"} Oct 01 15:20:40 crc kubenswrapper[4869]: I1001 15:20:40.612005 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-6647d584b4-vsdfk" Oct 01 15:20:40 crc kubenswrapper[4869]: I1001 15:20:40.647181 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-f548b88b9-jqj9f" podStartSLOduration=3.647157048 podStartE2EDuration="3.647157048s" podCreationTimestamp="2025-10-01 15:20:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:20:40.626789973 +0000 UTC m=+949.773633099" watchObservedRunningTime="2025-10-01 15:20:40.647157048 +0000 UTC m=+949.794000174" Oct 01 15:20:40 crc kubenswrapper[4869]: I1001 15:20:40.654359 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2f39f35-09b7-4953-b846-2cba520d5325-combined-ca-bundle\") pod \"neutron-96b4fb6d7-g25hj\" (UID: \"e2f39f35-09b7-4953-b846-2cba520d5325\") " pod="openstack/neutron-96b4fb6d7-g25hj" Oct 01 15:20:40 crc kubenswrapper[4869]: I1001 15:20:40.654431 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/e2f39f35-09b7-4953-b846-2cba520d5325-httpd-config\") pod \"neutron-96b4fb6d7-g25hj\" (UID: \"e2f39f35-09b7-4953-b846-2cba520d5325\") " pod="openstack/neutron-96b4fb6d7-g25hj" Oct 01 15:20:40 crc kubenswrapper[4869]: I1001 15:20:40.654573 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/e2f39f35-09b7-4953-b846-2cba520d5325-ovndb-tls-certs\") pod \"neutron-96b4fb6d7-g25hj\" (UID: \"e2f39f35-09b7-4953-b846-2cba520d5325\") " pod="openstack/neutron-96b4fb6d7-g25hj" Oct 01 15:20:40 crc kubenswrapper[4869]: I1001 15:20:40.654842 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e2f39f35-09b7-4953-b846-2cba520d5325-internal-tls-certs\") pod \"neutron-96b4fb6d7-g25hj\" (UID: \"e2f39f35-09b7-4953-b846-2cba520d5325\") " pod="openstack/neutron-96b4fb6d7-g25hj" Oct 01 15:20:40 crc kubenswrapper[4869]: I1001 15:20:40.654880 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/e2f39f35-09b7-4953-b846-2cba520d5325-config\") pod \"neutron-96b4fb6d7-g25hj\" (UID: \"e2f39f35-09b7-4953-b846-2cba520d5325\") " pod="openstack/neutron-96b4fb6d7-g25hj" Oct 01 15:20:40 crc kubenswrapper[4869]: I1001 15:20:40.654940 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e2f39f35-09b7-4953-b846-2cba520d5325-public-tls-certs\") pod \"neutron-96b4fb6d7-g25hj\" (UID: \"e2f39f35-09b7-4953-b846-2cba520d5325\") " pod="openstack/neutron-96b4fb6d7-g25hj" Oct 01 15:20:40 crc kubenswrapper[4869]: I1001 15:20:40.655042 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wh2d2\" (UniqueName: \"kubernetes.io/projected/e2f39f35-09b7-4953-b846-2cba520d5325-kube-api-access-wh2d2\") pod \"neutron-96b4fb6d7-g25hj\" (UID: \"e2f39f35-09b7-4953-b846-2cba520d5325\") " pod="openstack/neutron-96b4fb6d7-g25hj" Oct 01 15:20:40 crc kubenswrapper[4869]: I1001 15:20:40.669855 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/horizon-85845f7997-n9h7g"] Oct 01 15:20:40 crc kubenswrapper[4869]: I1001 15:20:40.684058 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-85845f7997-n9h7g"] Oct 01 15:20:40 crc kubenswrapper[4869]: I1001 15:20:40.696981 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-6647d584b4-vsdfk"] Oct 01 15:20:40 crc kubenswrapper[4869]: I1001 15:20:40.702316 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-6647d584b4-vsdfk"] Oct 01 15:20:40 crc kubenswrapper[4869]: I1001 15:20:40.757284 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e2f39f35-09b7-4953-b846-2cba520d5325-internal-tls-certs\") pod \"neutron-96b4fb6d7-g25hj\" (UID: \"e2f39f35-09b7-4953-b846-2cba520d5325\") " pod="openstack/neutron-96b4fb6d7-g25hj" Oct 01 15:20:40 crc kubenswrapper[4869]: I1001 15:20:40.757324 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/e2f39f35-09b7-4953-b846-2cba520d5325-config\") pod \"neutron-96b4fb6d7-g25hj\" (UID: \"e2f39f35-09b7-4953-b846-2cba520d5325\") " pod="openstack/neutron-96b4fb6d7-g25hj" Oct 01 15:20:40 crc kubenswrapper[4869]: I1001 15:20:40.757355 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e2f39f35-09b7-4953-b846-2cba520d5325-public-tls-certs\") pod \"neutron-96b4fb6d7-g25hj\" (UID: \"e2f39f35-09b7-4953-b846-2cba520d5325\") " pod="openstack/neutron-96b4fb6d7-g25hj" Oct 01 15:20:40 crc kubenswrapper[4869]: I1001 15:20:40.758162 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wh2d2\" (UniqueName: \"kubernetes.io/projected/e2f39f35-09b7-4953-b846-2cba520d5325-kube-api-access-wh2d2\") pod \"neutron-96b4fb6d7-g25hj\" (UID: \"e2f39f35-09b7-4953-b846-2cba520d5325\") " pod="openstack/neutron-96b4fb6d7-g25hj" Oct 01 15:20:40 crc kubenswrapper[4869]: I1001 15:20:40.758225 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2f39f35-09b7-4953-b846-2cba520d5325-combined-ca-bundle\") pod \"neutron-96b4fb6d7-g25hj\" (UID: \"e2f39f35-09b7-4953-b846-2cba520d5325\") " pod="openstack/neutron-96b4fb6d7-g25hj" Oct 01 15:20:40 crc kubenswrapper[4869]: I1001 15:20:40.758300 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/e2f39f35-09b7-4953-b846-2cba520d5325-httpd-config\") pod \"neutron-96b4fb6d7-g25hj\" (UID: \"e2f39f35-09b7-4953-b846-2cba520d5325\") " pod="openstack/neutron-96b4fb6d7-g25hj" Oct 01 15:20:40 crc kubenswrapper[4869]: I1001 15:20:40.758356 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/e2f39f35-09b7-4953-b846-2cba520d5325-ovndb-tls-certs\") pod \"neutron-96b4fb6d7-g25hj\" (UID: \"e2f39f35-09b7-4953-b846-2cba520d5325\") " pod="openstack/neutron-96b4fb6d7-g25hj" Oct 01 15:20:40 crc kubenswrapper[4869]: I1001 15:20:40.761067 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e2f39f35-09b7-4953-b846-2cba520d5325-internal-tls-certs\") pod \"neutron-96b4fb6d7-g25hj\" (UID: \"e2f39f35-09b7-4953-b846-2cba520d5325\") " 
pod="openstack/neutron-96b4fb6d7-g25hj" Oct 01 15:20:40 crc kubenswrapper[4869]: I1001 15:20:40.761139 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/e2f39f35-09b7-4953-b846-2cba520d5325-config\") pod \"neutron-96b4fb6d7-g25hj\" (UID: \"e2f39f35-09b7-4953-b846-2cba520d5325\") " pod="openstack/neutron-96b4fb6d7-g25hj" Oct 01 15:20:40 crc kubenswrapper[4869]: I1001 15:20:40.762452 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/e2f39f35-09b7-4953-b846-2cba520d5325-httpd-config\") pod \"neutron-96b4fb6d7-g25hj\" (UID: \"e2f39f35-09b7-4953-b846-2cba520d5325\") " pod="openstack/neutron-96b4fb6d7-g25hj" Oct 01 15:20:40 crc kubenswrapper[4869]: I1001 15:20:40.764422 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/e2f39f35-09b7-4953-b846-2cba520d5325-ovndb-tls-certs\") pod \"neutron-96b4fb6d7-g25hj\" (UID: \"e2f39f35-09b7-4953-b846-2cba520d5325\") " pod="openstack/neutron-96b4fb6d7-g25hj" Oct 01 15:20:40 crc kubenswrapper[4869]: I1001 15:20:40.774014 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2f39f35-09b7-4953-b846-2cba520d5325-combined-ca-bundle\") pod \"neutron-96b4fb6d7-g25hj\" (UID: \"e2f39f35-09b7-4953-b846-2cba520d5325\") " pod="openstack/neutron-96b4fb6d7-g25hj" Oct 01 15:20:40 crc kubenswrapper[4869]: I1001 15:20:40.777297 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e2f39f35-09b7-4953-b846-2cba520d5325-public-tls-certs\") pod \"neutron-96b4fb6d7-g25hj\" (UID: \"e2f39f35-09b7-4953-b846-2cba520d5325\") " pod="openstack/neutron-96b4fb6d7-g25hj" Oct 01 15:20:40 crc kubenswrapper[4869]: I1001 15:20:40.778732 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wh2d2\" (UniqueName: \"kubernetes.io/projected/e2f39f35-09b7-4953-b846-2cba520d5325-kube-api-access-wh2d2\") pod \"neutron-96b4fb6d7-g25hj\" (UID: \"e2f39f35-09b7-4953-b846-2cba520d5325\") " pod="openstack/neutron-96b4fb6d7-g25hj" Oct 01 15:20:40 crc kubenswrapper[4869]: I1001 15:20:40.800785 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-96b4fb6d7-g25hj" Oct 01 15:20:40 crc kubenswrapper[4869]: I1001 15:20:40.813576 4869 scope.go:117] "RemoveContainer" containerID="5337d199ab90cec38c8fb6bb0815ad515f9dc13f7c13dc9fc845f1f713e29768" Oct 01 15:20:40 crc kubenswrapper[4869]: I1001 15:20:40.910437 4869 scope.go:117] "RemoveContainer" containerID="03a3bc893c21f1cd1e12eb54fbe6056ca2c18b130dbae8c65059fb06dbe819b4" Oct 01 15:20:40 crc kubenswrapper[4869]: E1001 15:20:40.911387 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"03a3bc893c21f1cd1e12eb54fbe6056ca2c18b130dbae8c65059fb06dbe819b4\": container with ID starting with 03a3bc893c21f1cd1e12eb54fbe6056ca2c18b130dbae8c65059fb06dbe819b4 not found: ID does not exist" containerID="03a3bc893c21f1cd1e12eb54fbe6056ca2c18b130dbae8c65059fb06dbe819b4" Oct 01 15:20:40 crc kubenswrapper[4869]: I1001 15:20:40.911438 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"03a3bc893c21f1cd1e12eb54fbe6056ca2c18b130dbae8c65059fb06dbe819b4"} err="failed to get container status \"03a3bc893c21f1cd1e12eb54fbe6056ca2c18b130dbae8c65059fb06dbe819b4\": rpc error: code = NotFound desc = could not find container \"03a3bc893c21f1cd1e12eb54fbe6056ca2c18b130dbae8c65059fb06dbe819b4\": container with ID starting with 03a3bc893c21f1cd1e12eb54fbe6056ca2c18b130dbae8c65059fb06dbe819b4 not found: ID does not exist" Oct 01 15:20:40 crc kubenswrapper[4869]: I1001 15:20:40.911476 4869 scope.go:117] "RemoveContainer" containerID="5337d199ab90cec38c8fb6bb0815ad515f9dc13f7c13dc9fc845f1f713e29768" Oct 01 15:20:40 crc kubenswrapper[4869]: E1001 15:20:40.912576 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5337d199ab90cec38c8fb6bb0815ad515f9dc13f7c13dc9fc845f1f713e29768\": container with ID starting with 5337d199ab90cec38c8fb6bb0815ad515f9dc13f7c13dc9fc845f1f713e29768 not found: ID does not exist" containerID="5337d199ab90cec38c8fb6bb0815ad515f9dc13f7c13dc9fc845f1f713e29768" Oct 01 15:20:40 crc kubenswrapper[4869]: I1001 15:20:40.912603 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5337d199ab90cec38c8fb6bb0815ad515f9dc13f7c13dc9fc845f1f713e29768"} err="failed to get container status \"5337d199ab90cec38c8fb6bb0815ad515f9dc13f7c13dc9fc845f1f713e29768\": rpc error: code = NotFound desc = could not find container \"5337d199ab90cec38c8fb6bb0815ad515f9dc13f7c13dc9fc845f1f713e29768\": container with ID starting with 5337d199ab90cec38c8fb6bb0815ad515f9dc13f7c13dc9fc845f1f713e29768 not found: ID does not exist" Oct 01 15:20:40 crc kubenswrapper[4869]: I1001 15:20:40.912621 4869 scope.go:117] "RemoveContainer" containerID="03a3bc893c21f1cd1e12eb54fbe6056ca2c18b130dbae8c65059fb06dbe819b4" Oct 01 15:20:40 crc kubenswrapper[4869]: I1001 15:20:40.913716 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"03a3bc893c21f1cd1e12eb54fbe6056ca2c18b130dbae8c65059fb06dbe819b4"} err="failed to get container status \"03a3bc893c21f1cd1e12eb54fbe6056ca2c18b130dbae8c65059fb06dbe819b4\": rpc error: code = NotFound desc = could not find container \"03a3bc893c21f1cd1e12eb54fbe6056ca2c18b130dbae8c65059fb06dbe819b4\": container with ID starting with 03a3bc893c21f1cd1e12eb54fbe6056ca2c18b130dbae8c65059fb06dbe819b4 not found: ID does not exist" Oct 01 15:20:40 crc kubenswrapper[4869]: I1001 15:20:40.913736 
4869 scope.go:117] "RemoveContainer" containerID="5337d199ab90cec38c8fb6bb0815ad515f9dc13f7c13dc9fc845f1f713e29768" Oct 01 15:20:40 crc kubenswrapper[4869]: I1001 15:20:40.914014 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5337d199ab90cec38c8fb6bb0815ad515f9dc13f7c13dc9fc845f1f713e29768"} err="failed to get container status \"5337d199ab90cec38c8fb6bb0815ad515f9dc13f7c13dc9fc845f1f713e29768\": rpc error: code = NotFound desc = could not find container \"5337d199ab90cec38c8fb6bb0815ad515f9dc13f7c13dc9fc845f1f713e29768\": container with ID starting with 5337d199ab90cec38c8fb6bb0815ad515f9dc13f7c13dc9fc845f1f713e29768 not found: ID does not exist" Oct 01 15:20:40 crc kubenswrapper[4869]: I1001 15:20:40.914033 4869 scope.go:117] "RemoveContainer" containerID="443ba4fc8389996332bbb96dcf8220fe8bbf57edce9d0d1214f191d9aa34cf6e" Oct 01 15:20:40 crc kubenswrapper[4869]: I1001 15:20:40.948770 4869 scope.go:117] "RemoveContainer" containerID="fd763179891c7ac07764b590c98ced89ccaf481990b12d5588e073446dcf0a6b" Oct 01 15:20:41 crc kubenswrapper[4869]: I1001 15:20:41.326969 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-96b4fb6d7-g25hj"] Oct 01 15:20:41 crc kubenswrapper[4869]: I1001 15:20:41.337393 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-5766b74c9d-wpxpf" Oct 01 15:20:41 crc kubenswrapper[4869]: I1001 15:20:41.407837 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-5f66f6967d-mnbqz" Oct 01 15:20:41 crc kubenswrapper[4869]: I1001 15:20:41.609488 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a59f97ca-2d70-422f-ae93-81ec595b43aa" path="/var/lib/kubelet/pods/a59f97ca-2d70-422f-ae93-81ec595b43aa/volumes" Oct 01 15:20:41 crc kubenswrapper[4869]: I1001 15:20:41.610422 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b141c51a-44cd-4c2c-be11-6c8b5576a289" path="/var/lib/kubelet/pods/b141c51a-44cd-4c2c-be11-6c8b5576a289/volumes" Oct 01 15:20:41 crc kubenswrapper[4869]: I1001 15:20:41.625768 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-96b4fb6d7-g25hj" event={"ID":"e2f39f35-09b7-4953-b846-2cba520d5325","Type":"ContainerStarted","Data":"9993e9113fd23ea64c82054f5e51c521494822a0beaa4580d807c0832baf13c7"} Oct 01 15:20:41 crc kubenswrapper[4869]: I1001 15:20:41.625808 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-96b4fb6d7-g25hj" event={"ID":"e2f39f35-09b7-4953-b846-2cba520d5325","Type":"ContainerStarted","Data":"0242ac682ad06ea4ba97c73eb8fbe171729e43eb6580a2bee943103a4369f770"} Oct 01 15:20:42 crc kubenswrapper[4869]: I1001 15:20:42.069977 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Oct 01 15:20:42 crc kubenswrapper[4869]: I1001 15:20:42.146118 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 01 15:20:42 crc kubenswrapper[4869]: I1001 15:20:42.636072 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-96b4fb6d7-g25hj" event={"ID":"e2f39f35-09b7-4953-b846-2cba520d5325","Type":"ContainerStarted","Data":"ef725a2d61205922457b1daf1e12cdbfe14119af3011a067d9f87319eab37cda"} Oct 01 15:20:42 crc kubenswrapper[4869]: I1001 15:20:42.636214 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" 
podUID="8cf4ad0d-8c54-45ba-b36b-269f68d9e46d" containerName="cinder-scheduler" containerID="cri-o://7b737a2cb3c8cb1f340e265500d60564ed0247c504d3a8a75c2aac3f5eac597f" gracePeriod=30 Oct 01 15:20:42 crc kubenswrapper[4869]: I1001 15:20:42.636285 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="8cf4ad0d-8c54-45ba-b36b-269f68d9e46d" containerName="probe" containerID="cri-o://206a05984fb8bc0e0aeed737c6e7f44d011d40611058a9b85c9c6d37a902460a" gracePeriod=30 Oct 01 15:20:42 crc kubenswrapper[4869]: I1001 15:20:42.669464 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-96b4fb6d7-g25hj" podStartSLOduration=2.669137766 podStartE2EDuration="2.669137766s" podCreationTimestamp="2025-10-01 15:20:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:20:42.660699703 +0000 UTC m=+951.807542849" watchObservedRunningTime="2025-10-01 15:20:42.669137766 +0000 UTC m=+951.815980932" Oct 01 15:20:43 crc kubenswrapper[4869]: I1001 15:20:43.026818 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-5766b74c9d-wpxpf" Oct 01 15:20:43 crc kubenswrapper[4869]: I1001 15:20:43.199647 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-5f66f6967d-mnbqz" Oct 01 15:20:43 crc kubenswrapper[4869]: I1001 15:20:43.279749 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-5766b74c9d-wpxpf"] Oct 01 15:20:43 crc kubenswrapper[4869]: I1001 15:20:43.648314 4869 generic.go:334] "Generic (PLEG): container finished" podID="8cf4ad0d-8c54-45ba-b36b-269f68d9e46d" containerID="206a05984fb8bc0e0aeed737c6e7f44d011d40611058a9b85c9c6d37a902460a" exitCode=0 Oct 01 15:20:43 crc kubenswrapper[4869]: I1001 15:20:43.648402 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"8cf4ad0d-8c54-45ba-b36b-269f68d9e46d","Type":"ContainerDied","Data":"206a05984fb8bc0e0aeed737c6e7f44d011d40611058a9b85c9c6d37a902460a"} Oct 01 15:20:43 crc kubenswrapper[4869]: I1001 15:20:43.648696 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-96b4fb6d7-g25hj" Oct 01 15:20:43 crc kubenswrapper[4869]: I1001 15:20:43.648879 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-5766b74c9d-wpxpf" podUID="011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf" containerName="horizon-log" containerID="cri-o://848fa104b788b0a643bef9f8bc271d7b678ee782e58d1385dc06d84691994654" gracePeriod=30 Oct 01 15:20:43 crc kubenswrapper[4869]: I1001 15:20:43.648892 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-5766b74c9d-wpxpf" podUID="011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf" containerName="horizon" containerID="cri-o://26d24bc7aa47d955a5fcbe67784e963e6b5f386b86c229c48e9a56720466c88e" gracePeriod=30 Oct 01 15:20:46 crc kubenswrapper[4869]: I1001 15:20:46.484966 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-8454446974-2h6ft" Oct 01 15:20:46 crc kubenswrapper[4869]: I1001 15:20:46.536361 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-8454446974-2h6ft" Oct 01 15:20:46 crc kubenswrapper[4869]: I1001 15:20:46.708751 4869 generic.go:334] "Generic (PLEG): container finished" 
podID="8cf4ad0d-8c54-45ba-b36b-269f68d9e46d" containerID="7b737a2cb3c8cb1f340e265500d60564ed0247c504d3a8a75c2aac3f5eac597f" exitCode=0 Oct 01 15:20:46 crc kubenswrapper[4869]: I1001 15:20:46.708962 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"8cf4ad0d-8c54-45ba-b36b-269f68d9e46d","Type":"ContainerDied","Data":"7b737a2cb3c8cb1f340e265500d60564ed0247c504d3a8a75c2aac3f5eac597f"} Oct 01 15:20:47 crc kubenswrapper[4869]: I1001 15:20:47.046936 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 01 15:20:47 crc kubenswrapper[4869]: I1001 15:20:47.081140 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-6b94b79f97-gtm9w" Oct 01 15:20:47 crc kubenswrapper[4869]: I1001 15:20:47.225538 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8cf4ad0d-8c54-45ba-b36b-269f68d9e46d-combined-ca-bundle\") pod \"8cf4ad0d-8c54-45ba-b36b-269f68d9e46d\" (UID: \"8cf4ad0d-8c54-45ba-b36b-269f68d9e46d\") " Oct 01 15:20:47 crc kubenswrapper[4869]: I1001 15:20:47.225853 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8cf4ad0d-8c54-45ba-b36b-269f68d9e46d-config-data-custom\") pod \"8cf4ad0d-8c54-45ba-b36b-269f68d9e46d\" (UID: \"8cf4ad0d-8c54-45ba-b36b-269f68d9e46d\") " Oct 01 15:20:47 crc kubenswrapper[4869]: I1001 15:20:47.226009 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8cf4ad0d-8c54-45ba-b36b-269f68d9e46d-scripts\") pod \"8cf4ad0d-8c54-45ba-b36b-269f68d9e46d\" (UID: \"8cf4ad0d-8c54-45ba-b36b-269f68d9e46d\") " Oct 01 15:20:47 crc kubenswrapper[4869]: I1001 15:20:47.226041 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8cf4ad0d-8c54-45ba-b36b-269f68d9e46d-config-data\") pod \"8cf4ad0d-8c54-45ba-b36b-269f68d9e46d\" (UID: \"8cf4ad0d-8c54-45ba-b36b-269f68d9e46d\") " Oct 01 15:20:47 crc kubenswrapper[4869]: I1001 15:20:47.226058 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8cf4ad0d-8c54-45ba-b36b-269f68d9e46d-etc-machine-id\") pod \"8cf4ad0d-8c54-45ba-b36b-269f68d9e46d\" (UID: \"8cf4ad0d-8c54-45ba-b36b-269f68d9e46d\") " Oct 01 15:20:47 crc kubenswrapper[4869]: I1001 15:20:47.226088 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rqv66\" (UniqueName: \"kubernetes.io/projected/8cf4ad0d-8c54-45ba-b36b-269f68d9e46d-kube-api-access-rqv66\") pod \"8cf4ad0d-8c54-45ba-b36b-269f68d9e46d\" (UID: \"8cf4ad0d-8c54-45ba-b36b-269f68d9e46d\") " Oct 01 15:20:47 crc kubenswrapper[4869]: I1001 15:20:47.226150 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8cf4ad0d-8c54-45ba-b36b-269f68d9e46d-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "8cf4ad0d-8c54-45ba-b36b-269f68d9e46d" (UID: "8cf4ad0d-8c54-45ba-b36b-269f68d9e46d"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 15:20:47 crc kubenswrapper[4869]: I1001 15:20:47.227326 4869 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8cf4ad0d-8c54-45ba-b36b-269f68d9e46d-etc-machine-id\") on node \"crc\" DevicePath \"\"" Oct 01 15:20:47 crc kubenswrapper[4869]: I1001 15:20:47.230722 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cf4ad0d-8c54-45ba-b36b-269f68d9e46d-scripts" (OuterVolumeSpecName: "scripts") pod "8cf4ad0d-8c54-45ba-b36b-269f68d9e46d" (UID: "8cf4ad0d-8c54-45ba-b36b-269f68d9e46d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:20:47 crc kubenswrapper[4869]: I1001 15:20:47.230784 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cf4ad0d-8c54-45ba-b36b-269f68d9e46d-kube-api-access-rqv66" (OuterVolumeSpecName: "kube-api-access-rqv66") pod "8cf4ad0d-8c54-45ba-b36b-269f68d9e46d" (UID: "8cf4ad0d-8c54-45ba-b36b-269f68d9e46d"). InnerVolumeSpecName "kube-api-access-rqv66". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:20:47 crc kubenswrapper[4869]: I1001 15:20:47.232900 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cf4ad0d-8c54-45ba-b36b-269f68d9e46d-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "8cf4ad0d-8c54-45ba-b36b-269f68d9e46d" (UID: "8cf4ad0d-8c54-45ba-b36b-269f68d9e46d"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:20:47 crc kubenswrapper[4869]: I1001 15:20:47.271708 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cf4ad0d-8c54-45ba-b36b-269f68d9e46d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8cf4ad0d-8c54-45ba-b36b-269f68d9e46d" (UID: "8cf4ad0d-8c54-45ba-b36b-269f68d9e46d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:20:47 crc kubenswrapper[4869]: I1001 15:20:47.339216 4869 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8cf4ad0d-8c54-45ba-b36b-269f68d9e46d-scripts\") on node \"crc\" DevicePath \"\"" Oct 01 15:20:47 crc kubenswrapper[4869]: I1001 15:20:47.339247 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rqv66\" (UniqueName: \"kubernetes.io/projected/8cf4ad0d-8c54-45ba-b36b-269f68d9e46d-kube-api-access-rqv66\") on node \"crc\" DevicePath \"\"" Oct 01 15:20:47 crc kubenswrapper[4869]: I1001 15:20:47.339274 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8cf4ad0d-8c54-45ba-b36b-269f68d9e46d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 15:20:47 crc kubenswrapper[4869]: I1001 15:20:47.339284 4869 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8cf4ad0d-8c54-45ba-b36b-269f68d9e46d-config-data-custom\") on node \"crc\" DevicePath \"\"" Oct 01 15:20:47 crc kubenswrapper[4869]: I1001 15:20:47.348535 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cf4ad0d-8c54-45ba-b36b-269f68d9e46d-config-data" (OuterVolumeSpecName: "config-data") pod "8cf4ad0d-8c54-45ba-b36b-269f68d9e46d" (UID: "8cf4ad0d-8c54-45ba-b36b-269f68d9e46d"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:20:47 crc kubenswrapper[4869]: I1001 15:20:47.442706 4869 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8cf4ad0d-8c54-45ba-b36b-269f68d9e46d-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 15:20:47 crc kubenswrapper[4869]: I1001 15:20:47.719314 4869 generic.go:334] "Generic (PLEG): container finished" podID="011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf" containerID="26d24bc7aa47d955a5fcbe67784e963e6b5f386b86c229c48e9a56720466c88e" exitCode=0 Oct 01 15:20:47 crc kubenswrapper[4869]: I1001 15:20:47.719351 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5766b74c9d-wpxpf" event={"ID":"011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf","Type":"ContainerDied","Data":"26d24bc7aa47d955a5fcbe67784e963e6b5f386b86c229c48e9a56720466c88e"} Oct 01 15:20:47 crc kubenswrapper[4869]: I1001 15:20:47.721331 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"8cf4ad0d-8c54-45ba-b36b-269f68d9e46d","Type":"ContainerDied","Data":"bd01c0f1c35d9acc00d7b7dcbe47467a0bbda2bdbb35dcc1cf838092d9a62689"} Oct 01 15:20:47 crc kubenswrapper[4869]: I1001 15:20:47.721363 4869 scope.go:117] "RemoveContainer" containerID="206a05984fb8bc0e0aeed737c6e7f44d011d40611058a9b85c9c6d37a902460a" Oct 01 15:20:47 crc kubenswrapper[4869]: I1001 15:20:47.721473 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 01 15:20:47 crc kubenswrapper[4869]: I1001 15:20:47.745866 4869 scope.go:117] "RemoveContainer" containerID="7b737a2cb3c8cb1f340e265500d60564ed0247c504d3a8a75c2aac3f5eac597f" Oct 01 15:20:47 crc kubenswrapper[4869]: I1001 15:20:47.746928 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 01 15:20:47 crc kubenswrapper[4869]: I1001 15:20:47.763103 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 01 15:20:47 crc kubenswrapper[4869]: I1001 15:20:47.770596 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Oct 01 15:20:47 crc kubenswrapper[4869]: E1001 15:20:47.772802 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8cf4ad0d-8c54-45ba-b36b-269f68d9e46d" containerName="cinder-scheduler" Oct 01 15:20:47 crc kubenswrapper[4869]: I1001 15:20:47.772834 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="8cf4ad0d-8c54-45ba-b36b-269f68d9e46d" containerName="cinder-scheduler" Oct 01 15:20:47 crc kubenswrapper[4869]: E1001 15:20:47.772867 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8cf4ad0d-8c54-45ba-b36b-269f68d9e46d" containerName="probe" Oct 01 15:20:47 crc kubenswrapper[4869]: I1001 15:20:47.772876 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="8cf4ad0d-8c54-45ba-b36b-269f68d9e46d" containerName="probe" Oct 01 15:20:47 crc kubenswrapper[4869]: I1001 15:20:47.773119 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="8cf4ad0d-8c54-45ba-b36b-269f68d9e46d" containerName="probe" Oct 01 15:20:47 crc kubenswrapper[4869]: I1001 15:20:47.773146 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="8cf4ad0d-8c54-45ba-b36b-269f68d9e46d" containerName="cinder-scheduler" Oct 01 15:20:47 crc kubenswrapper[4869]: I1001 15:20:47.774308 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 01 15:20:47 crc kubenswrapper[4869]: I1001 15:20:47.776906 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Oct 01 15:20:47 crc kubenswrapper[4869]: I1001 15:20:47.785883 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 01 15:20:47 crc kubenswrapper[4869]: I1001 15:20:47.848773 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8e8826d4-6549-4216-bcae-afc6a135af5f-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"8e8826d4-6549-4216-bcae-afc6a135af5f\") " pod="openstack/cinder-scheduler-0" Oct 01 15:20:47 crc kubenswrapper[4869]: I1001 15:20:47.848851 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8e8826d4-6549-4216-bcae-afc6a135af5f-scripts\") pod \"cinder-scheduler-0\" (UID: \"8e8826d4-6549-4216-bcae-afc6a135af5f\") " pod="openstack/cinder-scheduler-0" Oct 01 15:20:47 crc kubenswrapper[4869]: I1001 15:20:47.849034 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8e8826d4-6549-4216-bcae-afc6a135af5f-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"8e8826d4-6549-4216-bcae-afc6a135af5f\") " pod="openstack/cinder-scheduler-0" Oct 01 15:20:47 crc kubenswrapper[4869]: I1001 15:20:47.849071 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8e8826d4-6549-4216-bcae-afc6a135af5f-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"8e8826d4-6549-4216-bcae-afc6a135af5f\") " pod="openstack/cinder-scheduler-0" Oct 01 15:20:47 crc kubenswrapper[4869]: I1001 15:20:47.849162 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dkkfh\" (UniqueName: \"kubernetes.io/projected/8e8826d4-6549-4216-bcae-afc6a135af5f-kube-api-access-dkkfh\") pod \"cinder-scheduler-0\" (UID: \"8e8826d4-6549-4216-bcae-afc6a135af5f\") " pod="openstack/cinder-scheduler-0" Oct 01 15:20:47 crc kubenswrapper[4869]: I1001 15:20:47.849223 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8e8826d4-6549-4216-bcae-afc6a135af5f-config-data\") pod \"cinder-scheduler-0\" (UID: \"8e8826d4-6549-4216-bcae-afc6a135af5f\") " pod="openstack/cinder-scheduler-0" Oct 01 15:20:47 crc kubenswrapper[4869]: I1001 15:20:47.965927 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8e8826d4-6549-4216-bcae-afc6a135af5f-config-data\") pod \"cinder-scheduler-0\" (UID: \"8e8826d4-6549-4216-bcae-afc6a135af5f\") " pod="openstack/cinder-scheduler-0" Oct 01 15:20:47 crc kubenswrapper[4869]: I1001 15:20:47.966099 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8e8826d4-6549-4216-bcae-afc6a135af5f-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"8e8826d4-6549-4216-bcae-afc6a135af5f\") " pod="openstack/cinder-scheduler-0" Oct 01 15:20:47 crc kubenswrapper[4869]: I1001 15:20:47.966152 4869 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8e8826d4-6549-4216-bcae-afc6a135af5f-scripts\") pod \"cinder-scheduler-0\" (UID: \"8e8826d4-6549-4216-bcae-afc6a135af5f\") " pod="openstack/cinder-scheduler-0" Oct 01 15:20:47 crc kubenswrapper[4869]: I1001 15:20:47.966348 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8e8826d4-6549-4216-bcae-afc6a135af5f-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"8e8826d4-6549-4216-bcae-afc6a135af5f\") " pod="openstack/cinder-scheduler-0" Oct 01 15:20:47 crc kubenswrapper[4869]: I1001 15:20:47.966401 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8e8826d4-6549-4216-bcae-afc6a135af5f-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"8e8826d4-6549-4216-bcae-afc6a135af5f\") " pod="openstack/cinder-scheduler-0" Oct 01 15:20:47 crc kubenswrapper[4869]: I1001 15:20:47.966588 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dkkfh\" (UniqueName: \"kubernetes.io/projected/8e8826d4-6549-4216-bcae-afc6a135af5f-kube-api-access-dkkfh\") pod \"cinder-scheduler-0\" (UID: \"8e8826d4-6549-4216-bcae-afc6a135af5f\") " pod="openstack/cinder-scheduler-0" Oct 01 15:20:47 crc kubenswrapper[4869]: I1001 15:20:47.967676 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8e8826d4-6549-4216-bcae-afc6a135af5f-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"8e8826d4-6549-4216-bcae-afc6a135af5f\") " pod="openstack/cinder-scheduler-0" Oct 01 15:20:47 crc kubenswrapper[4869]: I1001 15:20:47.973099 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8e8826d4-6549-4216-bcae-afc6a135af5f-config-data\") pod \"cinder-scheduler-0\" (UID: \"8e8826d4-6549-4216-bcae-afc6a135af5f\") " pod="openstack/cinder-scheduler-0" Oct 01 15:20:47 crc kubenswrapper[4869]: I1001 15:20:47.973535 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8e8826d4-6549-4216-bcae-afc6a135af5f-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"8e8826d4-6549-4216-bcae-afc6a135af5f\") " pod="openstack/cinder-scheduler-0" Oct 01 15:20:47 crc kubenswrapper[4869]: I1001 15:20:47.983798 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8e8826d4-6549-4216-bcae-afc6a135af5f-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"8e8826d4-6549-4216-bcae-afc6a135af5f\") " pod="openstack/cinder-scheduler-0" Oct 01 15:20:47 crc kubenswrapper[4869]: I1001 15:20:47.988628 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dkkfh\" (UniqueName: \"kubernetes.io/projected/8e8826d4-6549-4216-bcae-afc6a135af5f-kube-api-access-dkkfh\") pod \"cinder-scheduler-0\" (UID: \"8e8826d4-6549-4216-bcae-afc6a135af5f\") " pod="openstack/cinder-scheduler-0" Oct 01 15:20:47 crc kubenswrapper[4869]: I1001 15:20:47.993692 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8e8826d4-6549-4216-bcae-afc6a135af5f-scripts\") pod \"cinder-scheduler-0\" (UID: \"8e8826d4-6549-4216-bcae-afc6a135af5f\") " pod="openstack/cinder-scheduler-0" Oct 01 
15:20:48 crc kubenswrapper[4869]: I1001 15:20:48.091899 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 01 15:20:48 crc kubenswrapper[4869]: I1001 15:20:48.177480 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-f548b88b9-jqj9f" Oct 01 15:20:48 crc kubenswrapper[4869]: I1001 15:20:48.238097 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-d6669bb45-npxjb"] Oct 01 15:20:48 crc kubenswrapper[4869]: I1001 15:20:48.238337 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-d6669bb45-npxjb" podUID="7b58b756-3a68-4820-a610-e9d23f2cc4bb" containerName="dnsmasq-dns" containerID="cri-o://4233b1734d1bb72ea33d30f635bd91befb5597fa01a2bd88f116bd250f8d1831" gracePeriod=10 Oct 01 15:20:48 crc kubenswrapper[4869]: I1001 15:20:48.350287 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Oct 01 15:20:48 crc kubenswrapper[4869]: I1001 15:20:48.352594 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Oct 01 15:20:48 crc kubenswrapper[4869]: I1001 15:20:48.357677 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Oct 01 15:20:48 crc kubenswrapper[4869]: I1001 15:20:48.357849 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Oct 01 15:20:48 crc kubenswrapper[4869]: I1001 15:20:48.357952 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-b68ll" Oct 01 15:20:48 crc kubenswrapper[4869]: I1001 15:20:48.358486 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Oct 01 15:20:48 crc kubenswrapper[4869]: I1001 15:20:48.380140 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/a25ceadd-9d07-4575-83d0-44bd065cca59-openstack-config-secret\") pod \"openstackclient\" (UID: \"a25ceadd-9d07-4575-83d0-44bd065cca59\") " pod="openstack/openstackclient" Oct 01 15:20:48 crc kubenswrapper[4869]: I1001 15:20:48.380199 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a25ceadd-9d07-4575-83d0-44bd065cca59-combined-ca-bundle\") pod \"openstackclient\" (UID: \"a25ceadd-9d07-4575-83d0-44bd065cca59\") " pod="openstack/openstackclient" Oct 01 15:20:48 crc kubenswrapper[4869]: I1001 15:20:48.380228 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/a25ceadd-9d07-4575-83d0-44bd065cca59-openstack-config\") pod \"openstackclient\" (UID: \"a25ceadd-9d07-4575-83d0-44bd065cca59\") " pod="openstack/openstackclient" Oct 01 15:20:48 crc kubenswrapper[4869]: I1001 15:20:48.380300 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xxd9c\" (UniqueName: \"kubernetes.io/projected/a25ceadd-9d07-4575-83d0-44bd065cca59-kube-api-access-xxd9c\") pod \"openstackclient\" (UID: \"a25ceadd-9d07-4575-83d0-44bd065cca59\") " pod="openstack/openstackclient" Oct 01 15:20:48 crc kubenswrapper[4869]: I1001 15:20:48.481728 4869 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a25ceadd-9d07-4575-83d0-44bd065cca59-combined-ca-bundle\") pod \"openstackclient\" (UID: \"a25ceadd-9d07-4575-83d0-44bd065cca59\") " pod="openstack/openstackclient" Oct 01 15:20:48 crc kubenswrapper[4869]: I1001 15:20:48.481781 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/a25ceadd-9d07-4575-83d0-44bd065cca59-openstack-config\") pod \"openstackclient\" (UID: \"a25ceadd-9d07-4575-83d0-44bd065cca59\") " pod="openstack/openstackclient" Oct 01 15:20:48 crc kubenswrapper[4869]: I1001 15:20:48.481844 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xxd9c\" (UniqueName: \"kubernetes.io/projected/a25ceadd-9d07-4575-83d0-44bd065cca59-kube-api-access-xxd9c\") pod \"openstackclient\" (UID: \"a25ceadd-9d07-4575-83d0-44bd065cca59\") " pod="openstack/openstackclient" Oct 01 15:20:48 crc kubenswrapper[4869]: I1001 15:20:48.481905 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/a25ceadd-9d07-4575-83d0-44bd065cca59-openstack-config-secret\") pod \"openstackclient\" (UID: \"a25ceadd-9d07-4575-83d0-44bd065cca59\") " pod="openstack/openstackclient" Oct 01 15:20:48 crc kubenswrapper[4869]: I1001 15:20:48.483370 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/a25ceadd-9d07-4575-83d0-44bd065cca59-openstack-config\") pod \"openstackclient\" (UID: \"a25ceadd-9d07-4575-83d0-44bd065cca59\") " pod="openstack/openstackclient" Oct 01 15:20:48 crc kubenswrapper[4869]: I1001 15:20:48.488829 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/a25ceadd-9d07-4575-83d0-44bd065cca59-openstack-config-secret\") pod \"openstackclient\" (UID: \"a25ceadd-9d07-4575-83d0-44bd065cca59\") " pod="openstack/openstackclient" Oct 01 15:20:48 crc kubenswrapper[4869]: I1001 15:20:48.498821 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xxd9c\" (UniqueName: \"kubernetes.io/projected/a25ceadd-9d07-4575-83d0-44bd065cca59-kube-api-access-xxd9c\") pod \"openstackclient\" (UID: \"a25ceadd-9d07-4575-83d0-44bd065cca59\") " pod="openstack/openstackclient" Oct 01 15:20:48 crc kubenswrapper[4869]: I1001 15:20:48.498857 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a25ceadd-9d07-4575-83d0-44bd065cca59-combined-ca-bundle\") pod \"openstackclient\" (UID: \"a25ceadd-9d07-4575-83d0-44bd065cca59\") " pod="openstack/openstackclient" Oct 01 15:20:48 crc kubenswrapper[4869]: I1001 15:20:48.647924 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 01 15:20:48 crc kubenswrapper[4869]: I1001 15:20:48.716604 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-d6669bb45-npxjb" Oct 01 15:20:48 crc kubenswrapper[4869]: I1001 15:20:48.731880 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"8e8826d4-6549-4216-bcae-afc6a135af5f","Type":"ContainerStarted","Data":"5ff7e9a995d33f43989e6b7fd6f941c0f2a504936da9f6ed5d2b6370bfceb194"} Oct 01 15:20:48 crc kubenswrapper[4869]: I1001 15:20:48.740881 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Oct 01 15:20:48 crc kubenswrapper[4869]: I1001 15:20:48.747409 4869 generic.go:334] "Generic (PLEG): container finished" podID="7b58b756-3a68-4820-a610-e9d23f2cc4bb" containerID="4233b1734d1bb72ea33d30f635bd91befb5597fa01a2bd88f116bd250f8d1831" exitCode=0 Oct 01 15:20:48 crc kubenswrapper[4869]: I1001 15:20:48.747448 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-d6669bb45-npxjb" event={"ID":"7b58b756-3a68-4820-a610-e9d23f2cc4bb","Type":"ContainerDied","Data":"4233b1734d1bb72ea33d30f635bd91befb5597fa01a2bd88f116bd250f8d1831"} Oct 01 15:20:48 crc kubenswrapper[4869]: I1001 15:20:48.747473 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-d6669bb45-npxjb" event={"ID":"7b58b756-3a68-4820-a610-e9d23f2cc4bb","Type":"ContainerDied","Data":"407ae8ccc0336c490520cf6284719406a77cfd102545843daa9f5053793352e6"} Oct 01 15:20:48 crc kubenswrapper[4869]: I1001 15:20:48.747487 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-d6669bb45-npxjb" Oct 01 15:20:48 crc kubenswrapper[4869]: I1001 15:20:48.747494 4869 scope.go:117] "RemoveContainer" containerID="4233b1734d1bb72ea33d30f635bd91befb5597fa01a2bd88f116bd250f8d1831" Oct 01 15:20:48 crc kubenswrapper[4869]: I1001 15:20:48.774513 4869 scope.go:117] "RemoveContainer" containerID="76edf854ebd4d43f579051422e21f9dcac3447506c81fc9dcd283ff343cf78a1" Oct 01 15:20:48 crc kubenswrapper[4869]: I1001 15:20:48.786400 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7b58b756-3a68-4820-a610-e9d23f2cc4bb-dns-svc\") pod \"7b58b756-3a68-4820-a610-e9d23f2cc4bb\" (UID: \"7b58b756-3a68-4820-a610-e9d23f2cc4bb\") " Oct 01 15:20:48 crc kubenswrapper[4869]: I1001 15:20:48.786452 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7b58b756-3a68-4820-a610-e9d23f2cc4bb-config\") pod \"7b58b756-3a68-4820-a610-e9d23f2cc4bb\" (UID: \"7b58b756-3a68-4820-a610-e9d23f2cc4bb\") " Oct 01 15:20:48 crc kubenswrapper[4869]: I1001 15:20:48.786481 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7b58b756-3a68-4820-a610-e9d23f2cc4bb-ovsdbserver-nb\") pod \"7b58b756-3a68-4820-a610-e9d23f2cc4bb\" (UID: \"7b58b756-3a68-4820-a610-e9d23f2cc4bb\") " Oct 01 15:20:48 crc kubenswrapper[4869]: I1001 15:20:48.786568 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7b58b756-3a68-4820-a610-e9d23f2cc4bb-ovsdbserver-sb\") pod \"7b58b756-3a68-4820-a610-e9d23f2cc4bb\" (UID: \"7b58b756-3a68-4820-a610-e9d23f2cc4bb\") " Oct 01 15:20:48 crc kubenswrapper[4869]: I1001 15:20:48.786627 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jqssh\" (UniqueName: 
\"kubernetes.io/projected/7b58b756-3a68-4820-a610-e9d23f2cc4bb-kube-api-access-jqssh\") pod \"7b58b756-3a68-4820-a610-e9d23f2cc4bb\" (UID: \"7b58b756-3a68-4820-a610-e9d23f2cc4bb\") " Oct 01 15:20:48 crc kubenswrapper[4869]: I1001 15:20:48.792557 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7b58b756-3a68-4820-a610-e9d23f2cc4bb-kube-api-access-jqssh" (OuterVolumeSpecName: "kube-api-access-jqssh") pod "7b58b756-3a68-4820-a610-e9d23f2cc4bb" (UID: "7b58b756-3a68-4820-a610-e9d23f2cc4bb"). InnerVolumeSpecName "kube-api-access-jqssh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:20:48 crc kubenswrapper[4869]: I1001 15:20:48.818977 4869 scope.go:117] "RemoveContainer" containerID="4233b1734d1bb72ea33d30f635bd91befb5597fa01a2bd88f116bd250f8d1831" Oct 01 15:20:48 crc kubenswrapper[4869]: E1001 15:20:48.821377 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4233b1734d1bb72ea33d30f635bd91befb5597fa01a2bd88f116bd250f8d1831\": container with ID starting with 4233b1734d1bb72ea33d30f635bd91befb5597fa01a2bd88f116bd250f8d1831 not found: ID does not exist" containerID="4233b1734d1bb72ea33d30f635bd91befb5597fa01a2bd88f116bd250f8d1831" Oct 01 15:20:48 crc kubenswrapper[4869]: I1001 15:20:48.821569 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4233b1734d1bb72ea33d30f635bd91befb5597fa01a2bd88f116bd250f8d1831"} err="failed to get container status \"4233b1734d1bb72ea33d30f635bd91befb5597fa01a2bd88f116bd250f8d1831\": rpc error: code = NotFound desc = could not find container \"4233b1734d1bb72ea33d30f635bd91befb5597fa01a2bd88f116bd250f8d1831\": container with ID starting with 4233b1734d1bb72ea33d30f635bd91befb5597fa01a2bd88f116bd250f8d1831 not found: ID does not exist" Oct 01 15:20:48 crc kubenswrapper[4869]: I1001 15:20:48.821682 4869 scope.go:117] "RemoveContainer" containerID="76edf854ebd4d43f579051422e21f9dcac3447506c81fc9dcd283ff343cf78a1" Oct 01 15:20:48 crc kubenswrapper[4869]: E1001 15:20:48.823517 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"76edf854ebd4d43f579051422e21f9dcac3447506c81fc9dcd283ff343cf78a1\": container with ID starting with 76edf854ebd4d43f579051422e21f9dcac3447506c81fc9dcd283ff343cf78a1 not found: ID does not exist" containerID="76edf854ebd4d43f579051422e21f9dcac3447506c81fc9dcd283ff343cf78a1" Oct 01 15:20:48 crc kubenswrapper[4869]: I1001 15:20:48.823552 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"76edf854ebd4d43f579051422e21f9dcac3447506c81fc9dcd283ff343cf78a1"} err="failed to get container status \"76edf854ebd4d43f579051422e21f9dcac3447506c81fc9dcd283ff343cf78a1\": rpc error: code = NotFound desc = could not find container \"76edf854ebd4d43f579051422e21f9dcac3447506c81fc9dcd283ff343cf78a1\": container with ID starting with 76edf854ebd4d43f579051422e21f9dcac3447506c81fc9dcd283ff343cf78a1 not found: ID does not exist" Oct 01 15:20:48 crc kubenswrapper[4869]: I1001 15:20:48.851905 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7b58b756-3a68-4820-a610-e9d23f2cc4bb-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "7b58b756-3a68-4820-a610-e9d23f2cc4bb" (UID: "7b58b756-3a68-4820-a610-e9d23f2cc4bb"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:20:48 crc kubenswrapper[4869]: I1001 15:20:48.867696 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7b58b756-3a68-4820-a610-e9d23f2cc4bb-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "7b58b756-3a68-4820-a610-e9d23f2cc4bb" (UID: "7b58b756-3a68-4820-a610-e9d23f2cc4bb"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:20:48 crc kubenswrapper[4869]: I1001 15:20:48.878133 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7b58b756-3a68-4820-a610-e9d23f2cc4bb-config" (OuterVolumeSpecName: "config") pod "7b58b756-3a68-4820-a610-e9d23f2cc4bb" (UID: "7b58b756-3a68-4820-a610-e9d23f2cc4bb"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:20:48 crc kubenswrapper[4869]: I1001 15:20:48.892907 4869 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7b58b756-3a68-4820-a610-e9d23f2cc4bb-config\") on node \"crc\" DevicePath \"\"" Oct 01 15:20:48 crc kubenswrapper[4869]: I1001 15:20:48.892939 4869 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7b58b756-3a68-4820-a610-e9d23f2cc4bb-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 01 15:20:48 crc kubenswrapper[4869]: I1001 15:20:48.892949 4869 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7b58b756-3a68-4820-a610-e9d23f2cc4bb-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 01 15:20:48 crc kubenswrapper[4869]: I1001 15:20:48.892960 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jqssh\" (UniqueName: \"kubernetes.io/projected/7b58b756-3a68-4820-a610-e9d23f2cc4bb-kube-api-access-jqssh\") on node \"crc\" DevicePath \"\"" Oct 01 15:20:48 crc kubenswrapper[4869]: I1001 15:20:48.908724 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7b58b756-3a68-4820-a610-e9d23f2cc4bb-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "7b58b756-3a68-4820-a610-e9d23f2cc4bb" (UID: "7b58b756-3a68-4820-a610-e9d23f2cc4bb"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:20:48 crc kubenswrapper[4869]: I1001 15:20:48.995608 4869 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7b58b756-3a68-4820-a610-e9d23f2cc4bb-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 01 15:20:49 crc kubenswrapper[4869]: I1001 15:20:49.095104 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-d6669bb45-npxjb"] Oct 01 15:20:49 crc kubenswrapper[4869]: I1001 15:20:49.122458 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-d6669bb45-npxjb"] Oct 01 15:20:49 crc kubenswrapper[4869]: I1001 15:20:49.211651 4869 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-5766b74c9d-wpxpf" podUID="011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.140:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.140:8443: connect: connection refused" Oct 01 15:20:49 crc kubenswrapper[4869]: I1001 15:20:49.325522 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Oct 01 15:20:49 crc kubenswrapper[4869]: I1001 15:20:49.491773 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Oct 01 15:20:49 crc kubenswrapper[4869]: I1001 15:20:49.604649 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7b58b756-3a68-4820-a610-e9d23f2cc4bb" path="/var/lib/kubelet/pods/7b58b756-3a68-4820-a610-e9d23f2cc4bb/volumes" Oct 01 15:20:49 crc kubenswrapper[4869]: I1001 15:20:49.606018 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cf4ad0d-8c54-45ba-b36b-269f68d9e46d" path="/var/lib/kubelet/pods/8cf4ad0d-8c54-45ba-b36b-269f68d9e46d/volumes" Oct 01 15:20:49 crc kubenswrapper[4869]: I1001 15:20:49.771866 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"8e8826d4-6549-4216-bcae-afc6a135af5f","Type":"ContainerStarted","Data":"c546f28b2a0f917d5ead6b81c09d60c6a74a9bec32160ee5402ef152f1892532"} Oct 01 15:20:49 crc kubenswrapper[4869]: I1001 15:20:49.772682 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"a25ceadd-9d07-4575-83d0-44bd065cca59","Type":"ContainerStarted","Data":"993b4701436c93ae1f8eb2c88b862f09c5e607b208b0889e558ab14c7f576104"} Oct 01 15:20:50 crc kubenswrapper[4869]: I1001 15:20:50.790697 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"8e8826d4-6549-4216-bcae-afc6a135af5f","Type":"ContainerStarted","Data":"5ab6185fa57a2babf8896542d0fc1e346fd2f35e53d3868bdc50d75da312c15a"} Oct 01 15:20:50 crc kubenswrapper[4869]: I1001 15:20:50.814350 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=3.814329639 podStartE2EDuration="3.814329639s" podCreationTimestamp="2025-10-01 15:20:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:20:50.80880814 +0000 UTC m=+959.955651256" watchObservedRunningTime="2025-10-01 15:20:50.814329639 +0000 UTC m=+959.961172755" Oct 01 15:20:53 crc kubenswrapper[4869]: I1001 15:20:53.092462 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Oct 01 15:20:58 crc kubenswrapper[4869]: I1001 15:20:58.389015 
4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-gtw2h"] Oct 01 15:20:58 crc kubenswrapper[4869]: E1001 15:20:58.389790 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b58b756-3a68-4820-a610-e9d23f2cc4bb" containerName="init" Oct 01 15:20:58 crc kubenswrapper[4869]: I1001 15:20:58.389801 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b58b756-3a68-4820-a610-e9d23f2cc4bb" containerName="init" Oct 01 15:20:58 crc kubenswrapper[4869]: E1001 15:20:58.389823 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b58b756-3a68-4820-a610-e9d23f2cc4bb" containerName="dnsmasq-dns" Oct 01 15:20:58 crc kubenswrapper[4869]: I1001 15:20:58.389829 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b58b756-3a68-4820-a610-e9d23f2cc4bb" containerName="dnsmasq-dns" Oct 01 15:20:58 crc kubenswrapper[4869]: I1001 15:20:58.390003 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="7b58b756-3a68-4820-a610-e9d23f2cc4bb" containerName="dnsmasq-dns" Oct 01 15:20:58 crc kubenswrapper[4869]: I1001 15:20:58.390579 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-gtw2h" Oct 01 15:20:58 crc kubenswrapper[4869]: I1001 15:20:58.400006 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-gtw2h"] Oct 01 15:20:58 crc kubenswrapper[4869]: I1001 15:20:58.415217 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Oct 01 15:20:58 crc kubenswrapper[4869]: I1001 15:20:58.484224 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fwrjl\" (UniqueName: \"kubernetes.io/projected/88a0cb67-d211-4d9f-b794-c6d3f2a552b7-kube-api-access-fwrjl\") pod \"nova-api-db-create-gtw2h\" (UID: \"88a0cb67-d211-4d9f-b794-c6d3f2a552b7\") " pod="openstack/nova-api-db-create-gtw2h" Oct 01 15:20:58 crc kubenswrapper[4869]: I1001 15:20:58.506182 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-tbsmb"] Oct 01 15:20:58 crc kubenswrapper[4869]: I1001 15:20:58.507459 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-tbsmb" Oct 01 15:20:58 crc kubenswrapper[4869]: I1001 15:20:58.523406 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-tbsmb"] Oct 01 15:20:58 crc kubenswrapper[4869]: I1001 15:20:58.586040 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fwrjl\" (UniqueName: \"kubernetes.io/projected/88a0cb67-d211-4d9f-b794-c6d3f2a552b7-kube-api-access-fwrjl\") pod \"nova-api-db-create-gtw2h\" (UID: \"88a0cb67-d211-4d9f-b794-c6d3f2a552b7\") " pod="openstack/nova-api-db-create-gtw2h" Oct 01 15:20:58 crc kubenswrapper[4869]: I1001 15:20:58.591477 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-k5cms"] Oct 01 15:20:58 crc kubenswrapper[4869]: I1001 15:20:58.592535 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-k5cms" Oct 01 15:20:58 crc kubenswrapper[4869]: I1001 15:20:58.608171 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fwrjl\" (UniqueName: \"kubernetes.io/projected/88a0cb67-d211-4d9f-b794-c6d3f2a552b7-kube-api-access-fwrjl\") pod \"nova-api-db-create-gtw2h\" (UID: \"88a0cb67-d211-4d9f-b794-c6d3f2a552b7\") " pod="openstack/nova-api-db-create-gtw2h" Oct 01 15:20:58 crc kubenswrapper[4869]: I1001 15:20:58.609005 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-k5cms"] Oct 01 15:20:58 crc kubenswrapper[4869]: I1001 15:20:58.688153 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pczt5\" (UniqueName: \"kubernetes.io/projected/f10e4cd8-ebba-4aec-a029-0fd2536e170d-kube-api-access-pczt5\") pod \"nova-cell1-db-create-k5cms\" (UID: \"f10e4cd8-ebba-4aec-a029-0fd2536e170d\") " pod="openstack/nova-cell1-db-create-k5cms" Oct 01 15:20:58 crc kubenswrapper[4869]: I1001 15:20:58.688201 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fvv8v\" (UniqueName: \"kubernetes.io/projected/f7f7278d-fa72-44d6-bf31-2a3ca3d3e417-kube-api-access-fvv8v\") pod \"nova-cell0-db-create-tbsmb\" (UID: \"f7f7278d-fa72-44d6-bf31-2a3ca3d3e417\") " pod="openstack/nova-cell0-db-create-tbsmb" Oct 01 15:20:58 crc kubenswrapper[4869]: I1001 15:20:58.718556 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-gtw2h" Oct 01 15:20:58 crc kubenswrapper[4869]: I1001 15:20:58.789786 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pczt5\" (UniqueName: \"kubernetes.io/projected/f10e4cd8-ebba-4aec-a029-0fd2536e170d-kube-api-access-pczt5\") pod \"nova-cell1-db-create-k5cms\" (UID: \"f10e4cd8-ebba-4aec-a029-0fd2536e170d\") " pod="openstack/nova-cell1-db-create-k5cms" Oct 01 15:20:58 crc kubenswrapper[4869]: I1001 15:20:58.790378 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fvv8v\" (UniqueName: \"kubernetes.io/projected/f7f7278d-fa72-44d6-bf31-2a3ca3d3e417-kube-api-access-fvv8v\") pod \"nova-cell0-db-create-tbsmb\" (UID: \"f7f7278d-fa72-44d6-bf31-2a3ca3d3e417\") " pod="openstack/nova-cell0-db-create-tbsmb" Oct 01 15:20:58 crc kubenswrapper[4869]: I1001 15:20:58.809672 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pczt5\" (UniqueName: \"kubernetes.io/projected/f10e4cd8-ebba-4aec-a029-0fd2536e170d-kube-api-access-pczt5\") pod \"nova-cell1-db-create-k5cms\" (UID: \"f10e4cd8-ebba-4aec-a029-0fd2536e170d\") " pod="openstack/nova-cell1-db-create-k5cms" Oct 01 15:20:58 crc kubenswrapper[4869]: I1001 15:20:58.810245 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fvv8v\" (UniqueName: \"kubernetes.io/projected/f7f7278d-fa72-44d6-bf31-2a3ca3d3e417-kube-api-access-fvv8v\") pod \"nova-cell0-db-create-tbsmb\" (UID: \"f7f7278d-fa72-44d6-bf31-2a3ca3d3e417\") " pod="openstack/nova-cell0-db-create-tbsmb" Oct 01 15:20:58 crc kubenswrapper[4869]: I1001 15:20:58.839497 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-tbsmb" Oct 01 15:20:58 crc kubenswrapper[4869]: I1001 15:20:58.956162 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-k5cms" Oct 01 15:20:59 crc kubenswrapper[4869]: I1001 15:20:59.212078 4869 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-5766b74c9d-wpxpf" podUID="011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.140:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.140:8443: connect: connection refused" Oct 01 15:20:59 crc kubenswrapper[4869]: I1001 15:20:59.931464 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"a25ceadd-9d07-4575-83d0-44bd065cca59","Type":"ContainerStarted","Data":"a172e0692573df3d9b550de2025eefea8069ca5d52301f10c6c5f1ab906fa18b"} Oct 01 15:21:00 crc kubenswrapper[4869]: I1001 15:21:00.002085 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=1.783720119 podStartE2EDuration="12.002066943s" podCreationTimestamp="2025-10-01 15:20:48 +0000 UTC" firstStartedPulling="2025-10-01 15:20:49.364791728 +0000 UTC m=+958.511634844" lastFinishedPulling="2025-10-01 15:20:59.583138552 +0000 UTC m=+968.729981668" observedRunningTime="2025-10-01 15:20:59.977920953 +0000 UTC m=+969.124764069" watchObservedRunningTime="2025-10-01 15:21:00.002066943 +0000 UTC m=+969.148910059" Oct 01 15:21:00 crc kubenswrapper[4869]: I1001 15:21:00.080670 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-gtw2h"] Oct 01 15:21:00 crc kubenswrapper[4869]: W1001 15:21:00.088107 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod88a0cb67_d211_4d9f_b794_c6d3f2a552b7.slice/crio-4256e6732370495ea60a6291f14b890b4c7d1018271285db9856120f0845a90e WatchSource:0}: Error finding container 4256e6732370495ea60a6291f14b890b4c7d1018271285db9856120f0845a90e: Status 404 returned error can't find the container with id 4256e6732370495ea60a6291f14b890b4c7d1018271285db9856120f0845a90e Oct 01 15:21:00 crc kubenswrapper[4869]: I1001 15:21:00.088746 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-tbsmb"] Oct 01 15:21:00 crc kubenswrapper[4869]: W1001 15:21:00.090367 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf7f7278d_fa72_44d6_bf31_2a3ca3d3e417.slice/crio-97cc334a14760a15cbc0c4378148e0ffd5fc10ad0b116845debab340d3982c10 WatchSource:0}: Error finding container 97cc334a14760a15cbc0c4378148e0ffd5fc10ad0b116845debab340d3982c10: Status 404 returned error can't find the container with id 97cc334a14760a15cbc0c4378148e0ffd5fc10ad0b116845debab340d3982c10 Oct 01 15:21:00 crc kubenswrapper[4869]: I1001 15:21:00.318252 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-k5cms"] Oct 01 15:21:00 crc kubenswrapper[4869]: I1001 15:21:00.941928 4869 generic.go:334] "Generic (PLEG): container finished" podID="f10e4cd8-ebba-4aec-a029-0fd2536e170d" containerID="d76f347a36de5d899965d01a6e6e6851b17ed2a3beb4214cb1d38c4588d3f693" exitCode=0 Oct 01 15:21:00 crc kubenswrapper[4869]: I1001 15:21:00.941984 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-k5cms" event={"ID":"f10e4cd8-ebba-4aec-a029-0fd2536e170d","Type":"ContainerDied","Data":"d76f347a36de5d899965d01a6e6e6851b17ed2a3beb4214cb1d38c4588d3f693"} Oct 01 15:21:00 crc kubenswrapper[4869]: I1001 
15:21:00.943000 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-k5cms" event={"ID":"f10e4cd8-ebba-4aec-a029-0fd2536e170d","Type":"ContainerStarted","Data":"ff817d4b3c52ba69f12f801fbef00838de22ce6a5d034af143204e557368f214"} Oct 01 15:21:00 crc kubenswrapper[4869]: I1001 15:21:00.943487 4869 generic.go:334] "Generic (PLEG): container finished" podID="88a0cb67-d211-4d9f-b794-c6d3f2a552b7" containerID="ca3dc69a1e196b7aba3d96fd90ce4056225902db6371625177623336a532d0c2" exitCode=0 Oct 01 15:21:00 crc kubenswrapper[4869]: I1001 15:21:00.943535 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-gtw2h" event={"ID":"88a0cb67-d211-4d9f-b794-c6d3f2a552b7","Type":"ContainerDied","Data":"ca3dc69a1e196b7aba3d96fd90ce4056225902db6371625177623336a532d0c2"} Oct 01 15:21:00 crc kubenswrapper[4869]: I1001 15:21:00.943814 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-gtw2h" event={"ID":"88a0cb67-d211-4d9f-b794-c6d3f2a552b7","Type":"ContainerStarted","Data":"4256e6732370495ea60a6291f14b890b4c7d1018271285db9856120f0845a90e"} Oct 01 15:21:00 crc kubenswrapper[4869]: I1001 15:21:00.945687 4869 generic.go:334] "Generic (PLEG): container finished" podID="f7f7278d-fa72-44d6-bf31-2a3ca3d3e417" containerID="c00c29af0c199f885bd34258cce7079f4ee9948ec7ee596002e7641554320b97" exitCode=0 Oct 01 15:21:00 crc kubenswrapper[4869]: I1001 15:21:00.945715 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-tbsmb" event={"ID":"f7f7278d-fa72-44d6-bf31-2a3ca3d3e417","Type":"ContainerDied","Data":"c00c29af0c199f885bd34258cce7079f4ee9948ec7ee596002e7641554320b97"} Oct 01 15:21:00 crc kubenswrapper[4869]: I1001 15:21:00.945729 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-tbsmb" event={"ID":"f7f7278d-fa72-44d6-bf31-2a3ca3d3e417","Type":"ContainerStarted","Data":"97cc334a14760a15cbc0c4378148e0ffd5fc10ad0b116845debab340d3982c10"} Oct 01 15:21:01 crc kubenswrapper[4869]: I1001 15:21:01.644818 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 01 15:21:01 crc kubenswrapper[4869]: I1001 15:21:01.740341 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631-scripts\") pod \"cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631\" (UID: \"cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631\") " Oct 01 15:21:01 crc kubenswrapper[4869]: I1001 15:21:01.740424 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631-combined-ca-bundle\") pod \"cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631\" (UID: \"cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631\") " Oct 01 15:21:01 crc kubenswrapper[4869]: I1001 15:21:01.740455 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dm4cr\" (UniqueName: \"kubernetes.io/projected/cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631-kube-api-access-dm4cr\") pod \"cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631\" (UID: \"cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631\") " Oct 01 15:21:01 crc kubenswrapper[4869]: I1001 15:21:01.740480 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631-log-httpd\") pod \"cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631\" (UID: \"cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631\") " Oct 01 15:21:01 crc kubenswrapper[4869]: I1001 15:21:01.740592 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631-run-httpd\") pod \"cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631\" (UID: \"cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631\") " Oct 01 15:21:01 crc kubenswrapper[4869]: I1001 15:21:01.740631 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631-config-data\") pod \"cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631\" (UID: \"cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631\") " Oct 01 15:21:01 crc kubenswrapper[4869]: I1001 15:21:01.740649 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631-sg-core-conf-yaml\") pod \"cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631\" (UID: \"cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631\") " Oct 01 15:21:01 crc kubenswrapper[4869]: I1001 15:21:01.741564 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631" (UID: "cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 15:21:01 crc kubenswrapper[4869]: I1001 15:21:01.741844 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631" (UID: "cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 15:21:01 crc kubenswrapper[4869]: I1001 15:21:01.742383 4869 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 01 15:21:01 crc kubenswrapper[4869]: I1001 15:21:01.742423 4869 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 01 15:21:01 crc kubenswrapper[4869]: I1001 15:21:01.746531 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631-scripts" (OuterVolumeSpecName: "scripts") pod "cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631" (UID: "cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:21:01 crc kubenswrapper[4869]: I1001 15:21:01.752922 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631-kube-api-access-dm4cr" (OuterVolumeSpecName: "kube-api-access-dm4cr") pod "cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631" (UID: "cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631"). InnerVolumeSpecName "kube-api-access-dm4cr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:21:01 crc kubenswrapper[4869]: I1001 15:21:01.833728 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631" (UID: "cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:21:01 crc kubenswrapper[4869]: I1001 15:21:01.843909 4869 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 01 15:21:01 crc kubenswrapper[4869]: I1001 15:21:01.843933 4869 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631-scripts\") on node \"crc\" DevicePath \"\"" Oct 01 15:21:01 crc kubenswrapper[4869]: I1001 15:21:01.843943 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dm4cr\" (UniqueName: \"kubernetes.io/projected/cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631-kube-api-access-dm4cr\") on node \"crc\" DevicePath \"\"" Oct 01 15:21:01 crc kubenswrapper[4869]: I1001 15:21:01.850589 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631" (UID: "cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:21:01 crc kubenswrapper[4869]: I1001 15:21:01.887746 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631-config-data" (OuterVolumeSpecName: "config-data") pod "cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631" (UID: "cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:21:01 crc kubenswrapper[4869]: I1001 15:21:01.945857 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 15:21:01 crc kubenswrapper[4869]: I1001 15:21:01.945895 4869 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 15:21:01 crc kubenswrapper[4869]: I1001 15:21:01.960114 4869 generic.go:334] "Generic (PLEG): container finished" podID="cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631" containerID="d4ca17658569506170532a80a1c1f9c873d3f66d15512d2b00799f03d85ea88b" exitCode=137 Oct 01 15:21:01 crc kubenswrapper[4869]: I1001 15:21:01.960377 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 01 15:21:01 crc kubenswrapper[4869]: I1001 15:21:01.960977 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631","Type":"ContainerDied","Data":"d4ca17658569506170532a80a1c1f9c873d3f66d15512d2b00799f03d85ea88b"} Oct 01 15:21:01 crc kubenswrapper[4869]: I1001 15:21:01.961048 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631","Type":"ContainerDied","Data":"ca1d0cde3b352d9ca482a458374343992e8e99fc078b7c27009c3f2d7ede5292"} Oct 01 15:21:01 crc kubenswrapper[4869]: I1001 15:21:01.961068 4869 scope.go:117] "RemoveContainer" containerID="d4ca17658569506170532a80a1c1f9c873d3f66d15512d2b00799f03d85ea88b" Oct 01 15:21:02 crc kubenswrapper[4869]: I1001 15:21:02.011184 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 01 15:21:02 crc kubenswrapper[4869]: I1001 15:21:02.014217 4869 scope.go:117] "RemoveContainer" containerID="99d0e76b22b29e4947bfb92dfa7c9861448ea38250b4b8538bda3b9c670ddc17" Oct 01 15:21:02 crc kubenswrapper[4869]: I1001 15:21:02.025826 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 01 15:21:02 crc kubenswrapper[4869]: I1001 15:21:02.031883 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 01 15:21:02 crc kubenswrapper[4869]: E1001 15:21:02.032295 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631" containerName="ceilometer-central-agent" Oct 01 15:21:02 crc kubenswrapper[4869]: I1001 15:21:02.032311 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631" containerName="ceilometer-central-agent" Oct 01 15:21:02 crc kubenswrapper[4869]: E1001 15:21:02.032336 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631" containerName="proxy-httpd" Oct 01 15:21:02 crc kubenswrapper[4869]: I1001 15:21:02.032342 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631" containerName="proxy-httpd" Oct 01 15:21:02 crc kubenswrapper[4869]: E1001 15:21:02.032352 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631" containerName="sg-core" Oct 01 15:21:02 crc kubenswrapper[4869]: I1001 15:21:02.032358 4869 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631" containerName="sg-core" Oct 01 15:21:02 crc kubenswrapper[4869]: E1001 15:21:02.032383 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631" containerName="ceilometer-notification-agent" Oct 01 15:21:02 crc kubenswrapper[4869]: I1001 15:21:02.032389 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631" containerName="ceilometer-notification-agent" Oct 01 15:21:02 crc kubenswrapper[4869]: I1001 15:21:02.032536 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631" containerName="ceilometer-central-agent" Oct 01 15:21:02 crc kubenswrapper[4869]: I1001 15:21:02.032553 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631" containerName="sg-core" Oct 01 15:21:02 crc kubenswrapper[4869]: I1001 15:21:02.032570 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631" containerName="proxy-httpd" Oct 01 15:21:02 crc kubenswrapper[4869]: I1001 15:21:02.032581 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631" containerName="ceilometer-notification-agent" Oct 01 15:21:02 crc kubenswrapper[4869]: I1001 15:21:02.034155 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 01 15:21:02 crc kubenswrapper[4869]: I1001 15:21:02.037036 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 01 15:21:02 crc kubenswrapper[4869]: I1001 15:21:02.037227 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 01 15:21:02 crc kubenswrapper[4869]: I1001 15:21:02.041193 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 01 15:21:02 crc kubenswrapper[4869]: I1001 15:21:02.055827 4869 scope.go:117] "RemoveContainer" containerID="2a4d444a3d5e23bf38c8e2f1225cef437847d97521691a2eccf4ae028317772c" Oct 01 15:21:02 crc kubenswrapper[4869]: I1001 15:21:02.090424 4869 scope.go:117] "RemoveContainer" containerID="e908a2914b310dca9c1c19937855f20362a2902fd309d74542f25cd91fef47d9" Oct 01 15:21:02 crc kubenswrapper[4869]: I1001 15:21:02.116542 4869 scope.go:117] "RemoveContainer" containerID="d4ca17658569506170532a80a1c1f9c873d3f66d15512d2b00799f03d85ea88b" Oct 01 15:21:02 crc kubenswrapper[4869]: E1001 15:21:02.117016 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d4ca17658569506170532a80a1c1f9c873d3f66d15512d2b00799f03d85ea88b\": container with ID starting with d4ca17658569506170532a80a1c1f9c873d3f66d15512d2b00799f03d85ea88b not found: ID does not exist" containerID="d4ca17658569506170532a80a1c1f9c873d3f66d15512d2b00799f03d85ea88b" Oct 01 15:21:02 crc kubenswrapper[4869]: I1001 15:21:02.117052 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d4ca17658569506170532a80a1c1f9c873d3f66d15512d2b00799f03d85ea88b"} err="failed to get container status \"d4ca17658569506170532a80a1c1f9c873d3f66d15512d2b00799f03d85ea88b\": rpc error: code = NotFound desc = could not find container \"d4ca17658569506170532a80a1c1f9c873d3f66d15512d2b00799f03d85ea88b\": container with ID starting with d4ca17658569506170532a80a1c1f9c873d3f66d15512d2b00799f03d85ea88b not found: ID does not 
exist" Oct 01 15:21:02 crc kubenswrapper[4869]: I1001 15:21:02.117077 4869 scope.go:117] "RemoveContainer" containerID="99d0e76b22b29e4947bfb92dfa7c9861448ea38250b4b8538bda3b9c670ddc17" Oct 01 15:21:02 crc kubenswrapper[4869]: E1001 15:21:02.118167 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"99d0e76b22b29e4947bfb92dfa7c9861448ea38250b4b8538bda3b9c670ddc17\": container with ID starting with 99d0e76b22b29e4947bfb92dfa7c9861448ea38250b4b8538bda3b9c670ddc17 not found: ID does not exist" containerID="99d0e76b22b29e4947bfb92dfa7c9861448ea38250b4b8538bda3b9c670ddc17" Oct 01 15:21:02 crc kubenswrapper[4869]: I1001 15:21:02.118210 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"99d0e76b22b29e4947bfb92dfa7c9861448ea38250b4b8538bda3b9c670ddc17"} err="failed to get container status \"99d0e76b22b29e4947bfb92dfa7c9861448ea38250b4b8538bda3b9c670ddc17\": rpc error: code = NotFound desc = could not find container \"99d0e76b22b29e4947bfb92dfa7c9861448ea38250b4b8538bda3b9c670ddc17\": container with ID starting with 99d0e76b22b29e4947bfb92dfa7c9861448ea38250b4b8538bda3b9c670ddc17 not found: ID does not exist" Oct 01 15:21:02 crc kubenswrapper[4869]: I1001 15:21:02.118241 4869 scope.go:117] "RemoveContainer" containerID="2a4d444a3d5e23bf38c8e2f1225cef437847d97521691a2eccf4ae028317772c" Oct 01 15:21:02 crc kubenswrapper[4869]: E1001 15:21:02.118524 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2a4d444a3d5e23bf38c8e2f1225cef437847d97521691a2eccf4ae028317772c\": container with ID starting with 2a4d444a3d5e23bf38c8e2f1225cef437847d97521691a2eccf4ae028317772c not found: ID does not exist" containerID="2a4d444a3d5e23bf38c8e2f1225cef437847d97521691a2eccf4ae028317772c" Oct 01 15:21:02 crc kubenswrapper[4869]: I1001 15:21:02.118562 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2a4d444a3d5e23bf38c8e2f1225cef437847d97521691a2eccf4ae028317772c"} err="failed to get container status \"2a4d444a3d5e23bf38c8e2f1225cef437847d97521691a2eccf4ae028317772c\": rpc error: code = NotFound desc = could not find container \"2a4d444a3d5e23bf38c8e2f1225cef437847d97521691a2eccf4ae028317772c\": container with ID starting with 2a4d444a3d5e23bf38c8e2f1225cef437847d97521691a2eccf4ae028317772c not found: ID does not exist" Oct 01 15:21:02 crc kubenswrapper[4869]: I1001 15:21:02.118583 4869 scope.go:117] "RemoveContainer" containerID="e908a2914b310dca9c1c19937855f20362a2902fd309d74542f25cd91fef47d9" Oct 01 15:21:02 crc kubenswrapper[4869]: E1001 15:21:02.121996 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e908a2914b310dca9c1c19937855f20362a2902fd309d74542f25cd91fef47d9\": container with ID starting with e908a2914b310dca9c1c19937855f20362a2902fd309d74542f25cd91fef47d9 not found: ID does not exist" containerID="e908a2914b310dca9c1c19937855f20362a2902fd309d74542f25cd91fef47d9" Oct 01 15:21:02 crc kubenswrapper[4869]: I1001 15:21:02.122031 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e908a2914b310dca9c1c19937855f20362a2902fd309d74542f25cd91fef47d9"} err="failed to get container status \"e908a2914b310dca9c1c19937855f20362a2902fd309d74542f25cd91fef47d9\": rpc error: code = NotFound desc = could not find container 
\"e908a2914b310dca9c1c19937855f20362a2902fd309d74542f25cd91fef47d9\": container with ID starting with e908a2914b310dca9c1c19937855f20362a2902fd309d74542f25cd91fef47d9 not found: ID does not exist" Oct 01 15:21:02 crc kubenswrapper[4869]: I1001 15:21:02.147830 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b61db8eb-aaa5-4c45-be80-3dfe4f0bb400-scripts\") pod \"ceilometer-0\" (UID: \"b61db8eb-aaa5-4c45-be80-3dfe4f0bb400\") " pod="openstack/ceilometer-0" Oct 01 15:21:02 crc kubenswrapper[4869]: I1001 15:21:02.147926 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b61db8eb-aaa5-4c45-be80-3dfe4f0bb400-log-httpd\") pod \"ceilometer-0\" (UID: \"b61db8eb-aaa5-4c45-be80-3dfe4f0bb400\") " pod="openstack/ceilometer-0" Oct 01 15:21:02 crc kubenswrapper[4869]: I1001 15:21:02.147957 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b61db8eb-aaa5-4c45-be80-3dfe4f0bb400-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b61db8eb-aaa5-4c45-be80-3dfe4f0bb400\") " pod="openstack/ceilometer-0" Oct 01 15:21:02 crc kubenswrapper[4869]: I1001 15:21:02.148057 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b61db8eb-aaa5-4c45-be80-3dfe4f0bb400-config-data\") pod \"ceilometer-0\" (UID: \"b61db8eb-aaa5-4c45-be80-3dfe4f0bb400\") " pod="openstack/ceilometer-0" Oct 01 15:21:02 crc kubenswrapper[4869]: I1001 15:21:02.148084 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b61db8eb-aaa5-4c45-be80-3dfe4f0bb400-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b61db8eb-aaa5-4c45-be80-3dfe4f0bb400\") " pod="openstack/ceilometer-0" Oct 01 15:21:02 crc kubenswrapper[4869]: I1001 15:21:02.148117 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-46zzn\" (UniqueName: \"kubernetes.io/projected/b61db8eb-aaa5-4c45-be80-3dfe4f0bb400-kube-api-access-46zzn\") pod \"ceilometer-0\" (UID: \"b61db8eb-aaa5-4c45-be80-3dfe4f0bb400\") " pod="openstack/ceilometer-0" Oct 01 15:21:02 crc kubenswrapper[4869]: I1001 15:21:02.148145 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b61db8eb-aaa5-4c45-be80-3dfe4f0bb400-run-httpd\") pod \"ceilometer-0\" (UID: \"b61db8eb-aaa5-4c45-be80-3dfe4f0bb400\") " pod="openstack/ceilometer-0" Oct 01 15:21:02 crc kubenswrapper[4869]: I1001 15:21:02.249838 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b61db8eb-aaa5-4c45-be80-3dfe4f0bb400-log-httpd\") pod \"ceilometer-0\" (UID: \"b61db8eb-aaa5-4c45-be80-3dfe4f0bb400\") " pod="openstack/ceilometer-0" Oct 01 15:21:02 crc kubenswrapper[4869]: I1001 15:21:02.249905 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b61db8eb-aaa5-4c45-be80-3dfe4f0bb400-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b61db8eb-aaa5-4c45-be80-3dfe4f0bb400\") " pod="openstack/ceilometer-0" Oct 01 15:21:02 crc kubenswrapper[4869]: 
I1001 15:21:02.249993 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b61db8eb-aaa5-4c45-be80-3dfe4f0bb400-config-data\") pod \"ceilometer-0\" (UID: \"b61db8eb-aaa5-4c45-be80-3dfe4f0bb400\") " pod="openstack/ceilometer-0" Oct 01 15:21:02 crc kubenswrapper[4869]: I1001 15:21:02.250045 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b61db8eb-aaa5-4c45-be80-3dfe4f0bb400-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b61db8eb-aaa5-4c45-be80-3dfe4f0bb400\") " pod="openstack/ceilometer-0" Oct 01 15:21:02 crc kubenswrapper[4869]: I1001 15:21:02.250086 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-46zzn\" (UniqueName: \"kubernetes.io/projected/b61db8eb-aaa5-4c45-be80-3dfe4f0bb400-kube-api-access-46zzn\") pod \"ceilometer-0\" (UID: \"b61db8eb-aaa5-4c45-be80-3dfe4f0bb400\") " pod="openstack/ceilometer-0" Oct 01 15:21:02 crc kubenswrapper[4869]: I1001 15:21:02.250123 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b61db8eb-aaa5-4c45-be80-3dfe4f0bb400-run-httpd\") pod \"ceilometer-0\" (UID: \"b61db8eb-aaa5-4c45-be80-3dfe4f0bb400\") " pod="openstack/ceilometer-0" Oct 01 15:21:02 crc kubenswrapper[4869]: I1001 15:21:02.250185 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b61db8eb-aaa5-4c45-be80-3dfe4f0bb400-scripts\") pod \"ceilometer-0\" (UID: \"b61db8eb-aaa5-4c45-be80-3dfe4f0bb400\") " pod="openstack/ceilometer-0" Oct 01 15:21:02 crc kubenswrapper[4869]: I1001 15:21:02.250598 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b61db8eb-aaa5-4c45-be80-3dfe4f0bb400-log-httpd\") pod \"ceilometer-0\" (UID: \"b61db8eb-aaa5-4c45-be80-3dfe4f0bb400\") " pod="openstack/ceilometer-0" Oct 01 15:21:02 crc kubenswrapper[4869]: I1001 15:21:02.250665 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b61db8eb-aaa5-4c45-be80-3dfe4f0bb400-run-httpd\") pod \"ceilometer-0\" (UID: \"b61db8eb-aaa5-4c45-be80-3dfe4f0bb400\") " pod="openstack/ceilometer-0" Oct 01 15:21:02 crc kubenswrapper[4869]: I1001 15:21:02.255940 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b61db8eb-aaa5-4c45-be80-3dfe4f0bb400-scripts\") pod \"ceilometer-0\" (UID: \"b61db8eb-aaa5-4c45-be80-3dfe4f0bb400\") " pod="openstack/ceilometer-0" Oct 01 15:21:02 crc kubenswrapper[4869]: I1001 15:21:02.256312 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b61db8eb-aaa5-4c45-be80-3dfe4f0bb400-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b61db8eb-aaa5-4c45-be80-3dfe4f0bb400\") " pod="openstack/ceilometer-0" Oct 01 15:21:02 crc kubenswrapper[4869]: I1001 15:21:02.260676 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b61db8eb-aaa5-4c45-be80-3dfe4f0bb400-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b61db8eb-aaa5-4c45-be80-3dfe4f0bb400\") " pod="openstack/ceilometer-0" Oct 01 15:21:02 crc kubenswrapper[4869]: I1001 15:21:02.272365 4869 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b61db8eb-aaa5-4c45-be80-3dfe4f0bb400-config-data\") pod \"ceilometer-0\" (UID: \"b61db8eb-aaa5-4c45-be80-3dfe4f0bb400\") " pod="openstack/ceilometer-0" Oct 01 15:21:02 crc kubenswrapper[4869]: I1001 15:21:02.274523 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-46zzn\" (UniqueName: \"kubernetes.io/projected/b61db8eb-aaa5-4c45-be80-3dfe4f0bb400-kube-api-access-46zzn\") pod \"ceilometer-0\" (UID: \"b61db8eb-aaa5-4c45-be80-3dfe4f0bb400\") " pod="openstack/ceilometer-0" Oct 01 15:21:02 crc kubenswrapper[4869]: I1001 15:21:02.319421 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-tbsmb" Oct 01 15:21:02 crc kubenswrapper[4869]: I1001 15:21:02.361987 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 01 15:21:02 crc kubenswrapper[4869]: I1001 15:21:02.404175 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-k5cms" Oct 01 15:21:02 crc kubenswrapper[4869]: I1001 15:21:02.418812 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-gtw2h" Oct 01 15:21:02 crc kubenswrapper[4869]: I1001 15:21:02.453394 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fvv8v\" (UniqueName: \"kubernetes.io/projected/f7f7278d-fa72-44d6-bf31-2a3ca3d3e417-kube-api-access-fvv8v\") pod \"f7f7278d-fa72-44d6-bf31-2a3ca3d3e417\" (UID: \"f7f7278d-fa72-44d6-bf31-2a3ca3d3e417\") " Oct 01 15:21:02 crc kubenswrapper[4869]: I1001 15:21:02.458005 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f7f7278d-fa72-44d6-bf31-2a3ca3d3e417-kube-api-access-fvv8v" (OuterVolumeSpecName: "kube-api-access-fvv8v") pod "f7f7278d-fa72-44d6-bf31-2a3ca3d3e417" (UID: "f7f7278d-fa72-44d6-bf31-2a3ca3d3e417"). InnerVolumeSpecName "kube-api-access-fvv8v". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:21:02 crc kubenswrapper[4869]: I1001 15:21:02.554901 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pczt5\" (UniqueName: \"kubernetes.io/projected/f10e4cd8-ebba-4aec-a029-0fd2536e170d-kube-api-access-pczt5\") pod \"f10e4cd8-ebba-4aec-a029-0fd2536e170d\" (UID: \"f10e4cd8-ebba-4aec-a029-0fd2536e170d\") " Oct 01 15:21:02 crc kubenswrapper[4869]: I1001 15:21:02.554996 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fwrjl\" (UniqueName: \"kubernetes.io/projected/88a0cb67-d211-4d9f-b794-c6d3f2a552b7-kube-api-access-fwrjl\") pod \"88a0cb67-d211-4d9f-b794-c6d3f2a552b7\" (UID: \"88a0cb67-d211-4d9f-b794-c6d3f2a552b7\") " Oct 01 15:21:02 crc kubenswrapper[4869]: I1001 15:21:02.555386 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fvv8v\" (UniqueName: \"kubernetes.io/projected/f7f7278d-fa72-44d6-bf31-2a3ca3d3e417-kube-api-access-fvv8v\") on node \"crc\" DevicePath \"\"" Oct 01 15:21:02 crc kubenswrapper[4869]: I1001 15:21:02.559088 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/88a0cb67-d211-4d9f-b794-c6d3f2a552b7-kube-api-access-fwrjl" (OuterVolumeSpecName: "kube-api-access-fwrjl") pod "88a0cb67-d211-4d9f-b794-c6d3f2a552b7" (UID: "88a0cb67-d211-4d9f-b794-c6d3f2a552b7"). 
InnerVolumeSpecName "kube-api-access-fwrjl". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:21:02 crc kubenswrapper[4869]: I1001 15:21:02.559521 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f10e4cd8-ebba-4aec-a029-0fd2536e170d-kube-api-access-pczt5" (OuterVolumeSpecName: "kube-api-access-pczt5") pod "f10e4cd8-ebba-4aec-a029-0fd2536e170d" (UID: "f10e4cd8-ebba-4aec-a029-0fd2536e170d"). InnerVolumeSpecName "kube-api-access-pczt5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:21:02 crc kubenswrapper[4869]: I1001 15:21:02.661229 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pczt5\" (UniqueName: \"kubernetes.io/projected/f10e4cd8-ebba-4aec-a029-0fd2536e170d-kube-api-access-pczt5\") on node \"crc\" DevicePath \"\"" Oct 01 15:21:02 crc kubenswrapper[4869]: I1001 15:21:02.661319 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fwrjl\" (UniqueName: \"kubernetes.io/projected/88a0cb67-d211-4d9f-b794-c6d3f2a552b7-kube-api-access-fwrjl\") on node \"crc\" DevicePath \"\"" Oct 01 15:21:02 crc kubenswrapper[4869]: I1001 15:21:02.821053 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 01 15:21:02 crc kubenswrapper[4869]: I1001 15:21:02.843133 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 01 15:21:02 crc kubenswrapper[4869]: I1001 15:21:02.969676 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-k5cms" event={"ID":"f10e4cd8-ebba-4aec-a029-0fd2536e170d","Type":"ContainerDied","Data":"ff817d4b3c52ba69f12f801fbef00838de22ce6a5d034af143204e557368f214"} Oct 01 15:21:02 crc kubenswrapper[4869]: I1001 15:21:02.969708 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ff817d4b3c52ba69f12f801fbef00838de22ce6a5d034af143204e557368f214" Oct 01 15:21:02 crc kubenswrapper[4869]: I1001 15:21:02.969760 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-k5cms" Oct 01 15:21:02 crc kubenswrapper[4869]: I1001 15:21:02.973324 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-gtw2h" Oct 01 15:21:02 crc kubenswrapper[4869]: I1001 15:21:02.973309 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-gtw2h" event={"ID":"88a0cb67-d211-4d9f-b794-c6d3f2a552b7","Type":"ContainerDied","Data":"4256e6732370495ea60a6291f14b890b4c7d1018271285db9856120f0845a90e"} Oct 01 15:21:02 crc kubenswrapper[4869]: I1001 15:21:02.973462 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4256e6732370495ea60a6291f14b890b4c7d1018271285db9856120f0845a90e" Oct 01 15:21:02 crc kubenswrapper[4869]: I1001 15:21:02.974940 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b61db8eb-aaa5-4c45-be80-3dfe4f0bb400","Type":"ContainerStarted","Data":"939e7a27b994b276433525ec99cb019205395935d00a79b0018e1987e0454200"} Oct 01 15:21:02 crc kubenswrapper[4869]: I1001 15:21:02.976387 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-tbsmb" event={"ID":"f7f7278d-fa72-44d6-bf31-2a3ca3d3e417","Type":"ContainerDied","Data":"97cc334a14760a15cbc0c4378148e0ffd5fc10ad0b116845debab340d3982c10"} Oct 01 15:21:02 crc kubenswrapper[4869]: I1001 15:21:02.976415 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="97cc334a14760a15cbc0c4378148e0ffd5fc10ad0b116845debab340d3982c10" Oct 01 15:21:02 crc kubenswrapper[4869]: I1001 15:21:02.976429 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-tbsmb" Oct 01 15:21:03 crc kubenswrapper[4869]: I1001 15:21:03.595358 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631" path="/var/lib/kubelet/pods/cc0dd8d7-8230-4c07-87b1-6f4fc5e2e631/volumes" Oct 01 15:21:03 crc kubenswrapper[4869]: I1001 15:21:03.992480 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b61db8eb-aaa5-4c45-be80-3dfe4f0bb400","Type":"ContainerStarted","Data":"4acb6afc8d2a9020861ab74506a0e83cf0cd0f1434ce3b95efd7935d07636d30"} Oct 01 15:21:05 crc kubenswrapper[4869]: I1001 15:21:05.004463 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b61db8eb-aaa5-4c45-be80-3dfe4f0bb400","Type":"ContainerStarted","Data":"de7f0ec71b128cb279d2720132058684ed96f82e4205a77482b9d17464293023"} Oct 01 15:21:05 crc kubenswrapper[4869]: I1001 15:21:05.005096 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b61db8eb-aaa5-4c45-be80-3dfe4f0bb400","Type":"ContainerStarted","Data":"a39bc17b3a138d037d4e3d8be2c869793101d1c95ddd4eaa83bd6adae35149ea"} Oct 01 15:21:08 crc kubenswrapper[4869]: I1001 15:21:08.324891 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-6c8b49859b-vzzr7" Oct 01 15:21:08 crc kubenswrapper[4869]: I1001 15:21:08.636099 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-44c5-account-create-8zw48"] Oct 01 15:21:08 crc kubenswrapper[4869]: E1001 15:21:08.636533 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f7f7278d-fa72-44d6-bf31-2a3ca3d3e417" containerName="mariadb-database-create" Oct 01 15:21:08 crc kubenswrapper[4869]: I1001 15:21:08.636554 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="f7f7278d-fa72-44d6-bf31-2a3ca3d3e417" containerName="mariadb-database-create" Oct 01 15:21:08 crc kubenswrapper[4869]: 
E1001 15:21:08.636604 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="88a0cb67-d211-4d9f-b794-c6d3f2a552b7" containerName="mariadb-database-create" Oct 01 15:21:08 crc kubenswrapper[4869]: I1001 15:21:08.636614 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="88a0cb67-d211-4d9f-b794-c6d3f2a552b7" containerName="mariadb-database-create" Oct 01 15:21:08 crc kubenswrapper[4869]: E1001 15:21:08.636627 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f10e4cd8-ebba-4aec-a029-0fd2536e170d" containerName="mariadb-database-create" Oct 01 15:21:08 crc kubenswrapper[4869]: I1001 15:21:08.636635 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="f10e4cd8-ebba-4aec-a029-0fd2536e170d" containerName="mariadb-database-create" Oct 01 15:21:08 crc kubenswrapper[4869]: I1001 15:21:08.636824 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="88a0cb67-d211-4d9f-b794-c6d3f2a552b7" containerName="mariadb-database-create" Oct 01 15:21:08 crc kubenswrapper[4869]: I1001 15:21:08.636848 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="f10e4cd8-ebba-4aec-a029-0fd2536e170d" containerName="mariadb-database-create" Oct 01 15:21:08 crc kubenswrapper[4869]: I1001 15:21:08.636865 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="f7f7278d-fa72-44d6-bf31-2a3ca3d3e417" containerName="mariadb-database-create" Oct 01 15:21:08 crc kubenswrapper[4869]: I1001 15:21:08.637582 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-44c5-account-create-8zw48" Oct 01 15:21:08 crc kubenswrapper[4869]: I1001 15:21:08.639670 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret" Oct 01 15:21:08 crc kubenswrapper[4869]: I1001 15:21:08.645610 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-44c5-account-create-8zw48"] Oct 01 15:21:08 crc kubenswrapper[4869]: I1001 15:21:08.764652 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ckr29\" (UniqueName: \"kubernetes.io/projected/f88907af-39e7-4499-9745-1fe0a8c42774-kube-api-access-ckr29\") pod \"nova-api-44c5-account-create-8zw48\" (UID: \"f88907af-39e7-4499-9745-1fe0a8c42774\") " pod="openstack/nova-api-44c5-account-create-8zw48" Oct 01 15:21:08 crc kubenswrapper[4869]: I1001 15:21:08.820109 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-20ce-account-create-tgz6c"] Oct 01 15:21:08 crc kubenswrapper[4869]: I1001 15:21:08.821729 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-20ce-account-create-tgz6c" Oct 01 15:21:08 crc kubenswrapper[4869]: I1001 15:21:08.830391 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret" Oct 01 15:21:08 crc kubenswrapper[4869]: I1001 15:21:08.831886 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-20ce-account-create-tgz6c"] Oct 01 15:21:08 crc kubenswrapper[4869]: I1001 15:21:08.866750 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ckr29\" (UniqueName: \"kubernetes.io/projected/f88907af-39e7-4499-9745-1fe0a8c42774-kube-api-access-ckr29\") pod \"nova-api-44c5-account-create-8zw48\" (UID: \"f88907af-39e7-4499-9745-1fe0a8c42774\") " pod="openstack/nova-api-44c5-account-create-8zw48" Oct 01 15:21:08 crc kubenswrapper[4869]: I1001 15:21:08.886034 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ckr29\" (UniqueName: \"kubernetes.io/projected/f88907af-39e7-4499-9745-1fe0a8c42774-kube-api-access-ckr29\") pod \"nova-api-44c5-account-create-8zw48\" (UID: \"f88907af-39e7-4499-9745-1fe0a8c42774\") " pod="openstack/nova-api-44c5-account-create-8zw48" Oct 01 15:21:08 crc kubenswrapper[4869]: I1001 15:21:08.953192 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-44c5-account-create-8zw48" Oct 01 15:21:08 crc kubenswrapper[4869]: I1001 15:21:08.968482 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b7fcs\" (UniqueName: \"kubernetes.io/projected/709b2b83-2eba-4778-beda-8e312ef3a6d9-kube-api-access-b7fcs\") pod \"nova-cell0-20ce-account-create-tgz6c\" (UID: \"709b2b83-2eba-4778-beda-8e312ef3a6d9\") " pod="openstack/nova-cell0-20ce-account-create-tgz6c" Oct 01 15:21:09 crc kubenswrapper[4869]: I1001 15:21:09.032699 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-3a05-account-create-q2kxl"] Oct 01 15:21:09 crc kubenswrapper[4869]: I1001 15:21:09.033953 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-3a05-account-create-q2kxl" Oct 01 15:21:09 crc kubenswrapper[4869]: I1001 15:21:09.040592 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret" Oct 01 15:21:09 crc kubenswrapper[4869]: I1001 15:21:09.041407 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-3a05-account-create-q2kxl"] Oct 01 15:21:09 crc kubenswrapper[4869]: I1001 15:21:09.053531 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b61db8eb-aaa5-4c45-be80-3dfe4f0bb400","Type":"ContainerStarted","Data":"07fb2ccf54a8593c2defebf0b186423b15cfe2eb2fbd21b0f024ea6775c9f66a"} Oct 01 15:21:09 crc kubenswrapper[4869]: I1001 15:21:09.053754 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 01 15:21:09 crc kubenswrapper[4869]: I1001 15:21:09.053825 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b61db8eb-aaa5-4c45-be80-3dfe4f0bb400" containerName="sg-core" containerID="cri-o://de7f0ec71b128cb279d2720132058684ed96f82e4205a77482b9d17464293023" gracePeriod=30 Oct 01 15:21:09 crc kubenswrapper[4869]: I1001 15:21:09.053854 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b61db8eb-aaa5-4c45-be80-3dfe4f0bb400" containerName="proxy-httpd" containerID="cri-o://07fb2ccf54a8593c2defebf0b186423b15cfe2eb2fbd21b0f024ea6775c9f66a" gracePeriod=30 Oct 01 15:21:09 crc kubenswrapper[4869]: I1001 15:21:09.053874 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b61db8eb-aaa5-4c45-be80-3dfe4f0bb400" containerName="ceilometer-notification-agent" containerID="cri-o://a39bc17b3a138d037d4e3d8be2c869793101d1c95ddd4eaa83bd6adae35149ea" gracePeriod=30 Oct 01 15:21:09 crc kubenswrapper[4869]: I1001 15:21:09.053681 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b61db8eb-aaa5-4c45-be80-3dfe4f0bb400" containerName="ceilometer-central-agent" containerID="cri-o://4acb6afc8d2a9020861ab74506a0e83cf0cd0f1434ce3b95efd7935d07636d30" gracePeriod=30 Oct 01 15:21:09 crc kubenswrapper[4869]: I1001 15:21:09.069976 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b7fcs\" (UniqueName: \"kubernetes.io/projected/709b2b83-2eba-4778-beda-8e312ef3a6d9-kube-api-access-b7fcs\") pod \"nova-cell0-20ce-account-create-tgz6c\" (UID: \"709b2b83-2eba-4778-beda-8e312ef3a6d9\") " pod="openstack/nova-cell0-20ce-account-create-tgz6c" Oct 01 15:21:09 crc kubenswrapper[4869]: I1001 15:21:09.093394 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b7fcs\" (UniqueName: \"kubernetes.io/projected/709b2b83-2eba-4778-beda-8e312ef3a6d9-kube-api-access-b7fcs\") pod \"nova-cell0-20ce-account-create-tgz6c\" (UID: \"709b2b83-2eba-4778-beda-8e312ef3a6d9\") " pod="openstack/nova-cell0-20ce-account-create-tgz6c" Oct 01 15:21:09 crc kubenswrapper[4869]: I1001 15:21:09.100545 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.131432269 podStartE2EDuration="7.100521762s" podCreationTimestamp="2025-10-01 15:21:02 +0000 UTC" firstStartedPulling="2025-10-01 15:21:02.828964652 +0000 UTC m=+971.975807778" lastFinishedPulling="2025-10-01 15:21:07.798054155 +0000 UTC m=+976.944897271" 
observedRunningTime="2025-10-01 15:21:09.082310552 +0000 UTC m=+978.229153668" watchObservedRunningTime="2025-10-01 15:21:09.100521762 +0000 UTC m=+978.247364888" Oct 01 15:21:09 crc kubenswrapper[4869]: I1001 15:21:09.143360 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-20ce-account-create-tgz6c" Oct 01 15:21:09 crc kubenswrapper[4869]: I1001 15:21:09.173020 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hz2d9\" (UniqueName: \"kubernetes.io/projected/0ce74364-60ae-42f0-9151-265fe3a38e1a-kube-api-access-hz2d9\") pod \"nova-cell1-3a05-account-create-q2kxl\" (UID: \"0ce74364-60ae-42f0-9151-265fe3a38e1a\") " pod="openstack/nova-cell1-3a05-account-create-q2kxl" Oct 01 15:21:09 crc kubenswrapper[4869]: I1001 15:21:09.212159 4869 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-5766b74c9d-wpxpf" podUID="011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.140:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.140:8443: connect: connection refused" Oct 01 15:21:09 crc kubenswrapper[4869]: I1001 15:21:09.212483 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-5766b74c9d-wpxpf" Oct 01 15:21:09 crc kubenswrapper[4869]: I1001 15:21:09.275647 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hz2d9\" (UniqueName: \"kubernetes.io/projected/0ce74364-60ae-42f0-9151-265fe3a38e1a-kube-api-access-hz2d9\") pod \"nova-cell1-3a05-account-create-q2kxl\" (UID: \"0ce74364-60ae-42f0-9151-265fe3a38e1a\") " pod="openstack/nova-cell1-3a05-account-create-q2kxl" Oct 01 15:21:09 crc kubenswrapper[4869]: I1001 15:21:09.297181 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hz2d9\" (UniqueName: \"kubernetes.io/projected/0ce74364-60ae-42f0-9151-265fe3a38e1a-kube-api-access-hz2d9\") pod \"nova-cell1-3a05-account-create-q2kxl\" (UID: \"0ce74364-60ae-42f0-9151-265fe3a38e1a\") " pod="openstack/nova-cell1-3a05-account-create-q2kxl" Oct 01 15:21:09 crc kubenswrapper[4869]: I1001 15:21:09.386072 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-3a05-account-create-q2kxl" Oct 01 15:21:09 crc kubenswrapper[4869]: I1001 15:21:09.431429 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-44c5-account-create-8zw48"] Oct 01 15:21:09 crc kubenswrapper[4869]: I1001 15:21:09.618150 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-20ce-account-create-tgz6c"] Oct 01 15:21:09 crc kubenswrapper[4869]: I1001 15:21:09.855620 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 01 15:21:09 crc kubenswrapper[4869]: I1001 15:21:09.856058 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-3a05-account-create-q2kxl"] Oct 01 15:21:09 crc kubenswrapper[4869]: W1001 15:21:09.859152 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0ce74364_60ae_42f0_9151_265fe3a38e1a.slice/crio-46d848047f023b85a7b94d2ebac3db25a1ea10aa17496905888ca0f91ce7ba0d WatchSource:0}: Error finding container 46d848047f023b85a7b94d2ebac3db25a1ea10aa17496905888ca0f91ce7ba0d: Status 404 returned error can't find the container with id 46d848047f023b85a7b94d2ebac3db25a1ea10aa17496905888ca0f91ce7ba0d Oct 01 15:21:09 crc kubenswrapper[4869]: I1001 15:21:09.991829 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b61db8eb-aaa5-4c45-be80-3dfe4f0bb400-sg-core-conf-yaml\") pod \"b61db8eb-aaa5-4c45-be80-3dfe4f0bb400\" (UID: \"b61db8eb-aaa5-4c45-be80-3dfe4f0bb400\") " Oct 01 15:21:09 crc kubenswrapper[4869]: I1001 15:21:09.991915 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b61db8eb-aaa5-4c45-be80-3dfe4f0bb400-config-data\") pod \"b61db8eb-aaa5-4c45-be80-3dfe4f0bb400\" (UID: \"b61db8eb-aaa5-4c45-be80-3dfe4f0bb400\") " Oct 01 15:21:09 crc kubenswrapper[4869]: I1001 15:21:09.991944 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b61db8eb-aaa5-4c45-be80-3dfe4f0bb400-scripts\") pod \"b61db8eb-aaa5-4c45-be80-3dfe4f0bb400\" (UID: \"b61db8eb-aaa5-4c45-be80-3dfe4f0bb400\") " Oct 01 15:21:09 crc kubenswrapper[4869]: I1001 15:21:09.991979 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-46zzn\" (UniqueName: \"kubernetes.io/projected/b61db8eb-aaa5-4c45-be80-3dfe4f0bb400-kube-api-access-46zzn\") pod \"b61db8eb-aaa5-4c45-be80-3dfe4f0bb400\" (UID: \"b61db8eb-aaa5-4c45-be80-3dfe4f0bb400\") " Oct 01 15:21:09 crc kubenswrapper[4869]: I1001 15:21:09.992025 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b61db8eb-aaa5-4c45-be80-3dfe4f0bb400-combined-ca-bundle\") pod \"b61db8eb-aaa5-4c45-be80-3dfe4f0bb400\" (UID: \"b61db8eb-aaa5-4c45-be80-3dfe4f0bb400\") " Oct 01 15:21:09 crc kubenswrapper[4869]: I1001 15:21:09.992123 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b61db8eb-aaa5-4c45-be80-3dfe4f0bb400-log-httpd\") pod \"b61db8eb-aaa5-4c45-be80-3dfe4f0bb400\" (UID: \"b61db8eb-aaa5-4c45-be80-3dfe4f0bb400\") " Oct 01 15:21:09 crc kubenswrapper[4869]: I1001 15:21:09.992146 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b61db8eb-aaa5-4c45-be80-3dfe4f0bb400-run-httpd\") pod \"b61db8eb-aaa5-4c45-be80-3dfe4f0bb400\" (UID: \"b61db8eb-aaa5-4c45-be80-3dfe4f0bb400\") " Oct 01 15:21:09 crc kubenswrapper[4869]: I1001 15:21:09.992621 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b61db8eb-aaa5-4c45-be80-3dfe4f0bb400-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "b61db8eb-aaa5-4c45-be80-3dfe4f0bb400" (UID: 
"b61db8eb-aaa5-4c45-be80-3dfe4f0bb400"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 15:21:09 crc kubenswrapper[4869]: I1001 15:21:09.992656 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b61db8eb-aaa5-4c45-be80-3dfe4f0bb400-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "b61db8eb-aaa5-4c45-be80-3dfe4f0bb400" (UID: "b61db8eb-aaa5-4c45-be80-3dfe4f0bb400"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 15:21:09 crc kubenswrapper[4869]: I1001 15:21:09.996199 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b61db8eb-aaa5-4c45-be80-3dfe4f0bb400-kube-api-access-46zzn" (OuterVolumeSpecName: "kube-api-access-46zzn") pod "b61db8eb-aaa5-4c45-be80-3dfe4f0bb400" (UID: "b61db8eb-aaa5-4c45-be80-3dfe4f0bb400"). InnerVolumeSpecName "kube-api-access-46zzn". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:21:10 crc kubenswrapper[4869]: I1001 15:21:10.018860 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b61db8eb-aaa5-4c45-be80-3dfe4f0bb400-scripts" (OuterVolumeSpecName: "scripts") pod "b61db8eb-aaa5-4c45-be80-3dfe4f0bb400" (UID: "b61db8eb-aaa5-4c45-be80-3dfe4f0bb400"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:21:10 crc kubenswrapper[4869]: I1001 15:21:10.025684 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b61db8eb-aaa5-4c45-be80-3dfe4f0bb400-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "b61db8eb-aaa5-4c45-be80-3dfe4f0bb400" (UID: "b61db8eb-aaa5-4c45-be80-3dfe4f0bb400"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:21:10 crc kubenswrapper[4869]: I1001 15:21:10.063675 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b61db8eb-aaa5-4c45-be80-3dfe4f0bb400-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b61db8eb-aaa5-4c45-be80-3dfe4f0bb400" (UID: "b61db8eb-aaa5-4c45-be80-3dfe4f0bb400"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:21:10 crc kubenswrapper[4869]: I1001 15:21:10.069791 4869 generic.go:334] "Generic (PLEG): container finished" podID="709b2b83-2eba-4778-beda-8e312ef3a6d9" containerID="8d120ffac4c62007201ef1382a40498f8c7ad1994d4c95c61f10b5565efd2225" exitCode=0 Oct 01 15:21:10 crc kubenswrapper[4869]: I1001 15:21:10.069884 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-20ce-account-create-tgz6c" event={"ID":"709b2b83-2eba-4778-beda-8e312ef3a6d9","Type":"ContainerDied","Data":"8d120ffac4c62007201ef1382a40498f8c7ad1994d4c95c61f10b5565efd2225"} Oct 01 15:21:10 crc kubenswrapper[4869]: I1001 15:21:10.069915 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-20ce-account-create-tgz6c" event={"ID":"709b2b83-2eba-4778-beda-8e312ef3a6d9","Type":"ContainerStarted","Data":"91a560112cd1763da8c005be7c2a920b31bb72231a64c7d86f15e0ee32a70beb"} Oct 01 15:21:10 crc kubenswrapper[4869]: I1001 15:21:10.073991 4869 generic.go:334] "Generic (PLEG): container finished" podID="b61db8eb-aaa5-4c45-be80-3dfe4f0bb400" containerID="07fb2ccf54a8593c2defebf0b186423b15cfe2eb2fbd21b0f024ea6775c9f66a" exitCode=0 Oct 01 15:21:10 crc kubenswrapper[4869]: I1001 15:21:10.074025 4869 generic.go:334] "Generic (PLEG): container finished" podID="b61db8eb-aaa5-4c45-be80-3dfe4f0bb400" containerID="de7f0ec71b128cb279d2720132058684ed96f82e4205a77482b9d17464293023" exitCode=2 Oct 01 15:21:10 crc kubenswrapper[4869]: I1001 15:21:10.074037 4869 generic.go:334] "Generic (PLEG): container finished" podID="b61db8eb-aaa5-4c45-be80-3dfe4f0bb400" containerID="a39bc17b3a138d037d4e3d8be2c869793101d1c95ddd4eaa83bd6adae35149ea" exitCode=0 Oct 01 15:21:10 crc kubenswrapper[4869]: I1001 15:21:10.074046 4869 generic.go:334] "Generic (PLEG): container finished" podID="b61db8eb-aaa5-4c45-be80-3dfe4f0bb400" containerID="4acb6afc8d2a9020861ab74506a0e83cf0cd0f1434ce3b95efd7935d07636d30" exitCode=0 Oct 01 15:21:10 crc kubenswrapper[4869]: I1001 15:21:10.074056 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 01 15:21:10 crc kubenswrapper[4869]: I1001 15:21:10.074098 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b61db8eb-aaa5-4c45-be80-3dfe4f0bb400","Type":"ContainerDied","Data":"07fb2ccf54a8593c2defebf0b186423b15cfe2eb2fbd21b0f024ea6775c9f66a"} Oct 01 15:21:10 crc kubenswrapper[4869]: I1001 15:21:10.074124 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b61db8eb-aaa5-4c45-be80-3dfe4f0bb400","Type":"ContainerDied","Data":"de7f0ec71b128cb279d2720132058684ed96f82e4205a77482b9d17464293023"} Oct 01 15:21:10 crc kubenswrapper[4869]: I1001 15:21:10.074137 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b61db8eb-aaa5-4c45-be80-3dfe4f0bb400","Type":"ContainerDied","Data":"a39bc17b3a138d037d4e3d8be2c869793101d1c95ddd4eaa83bd6adae35149ea"} Oct 01 15:21:10 crc kubenswrapper[4869]: I1001 15:21:10.074149 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b61db8eb-aaa5-4c45-be80-3dfe4f0bb400","Type":"ContainerDied","Data":"4acb6afc8d2a9020861ab74506a0e83cf0cd0f1434ce3b95efd7935d07636d30"} Oct 01 15:21:10 crc kubenswrapper[4869]: I1001 15:21:10.074160 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b61db8eb-aaa5-4c45-be80-3dfe4f0bb400","Type":"ContainerDied","Data":"939e7a27b994b276433525ec99cb019205395935d00a79b0018e1987e0454200"} Oct 01 15:21:10 crc kubenswrapper[4869]: I1001 15:21:10.074177 4869 scope.go:117] "RemoveContainer" containerID="07fb2ccf54a8593c2defebf0b186423b15cfe2eb2fbd21b0f024ea6775c9f66a" Oct 01 15:21:10 crc kubenswrapper[4869]: I1001 15:21:10.076420 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-3a05-account-create-q2kxl" event={"ID":"0ce74364-60ae-42f0-9151-265fe3a38e1a","Type":"ContainerStarted","Data":"46d848047f023b85a7b94d2ebac3db25a1ea10aa17496905888ca0f91ce7ba0d"} Oct 01 15:21:10 crc kubenswrapper[4869]: I1001 15:21:10.088574 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b61db8eb-aaa5-4c45-be80-3dfe4f0bb400-config-data" (OuterVolumeSpecName: "config-data") pod "b61db8eb-aaa5-4c45-be80-3dfe4f0bb400" (UID: "b61db8eb-aaa5-4c45-be80-3dfe4f0bb400"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:21:10 crc kubenswrapper[4869]: I1001 15:21:10.088821 4869 generic.go:334] "Generic (PLEG): container finished" podID="f88907af-39e7-4499-9745-1fe0a8c42774" containerID="7716692b47a29a7f4208d74481f25906b16eabd51acdb9c242e3824d8349e096" exitCode=0 Oct 01 15:21:10 crc kubenswrapper[4869]: I1001 15:21:10.088861 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-44c5-account-create-8zw48" event={"ID":"f88907af-39e7-4499-9745-1fe0a8c42774","Type":"ContainerDied","Data":"7716692b47a29a7f4208d74481f25906b16eabd51acdb9c242e3824d8349e096"} Oct 01 15:21:10 crc kubenswrapper[4869]: I1001 15:21:10.088890 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-44c5-account-create-8zw48" event={"ID":"f88907af-39e7-4499-9745-1fe0a8c42774","Type":"ContainerStarted","Data":"707c3ecb75770062d34e7cc40259101e1d7da524bf7a1b0c67753c1bd1b36c77"} Oct 01 15:21:10 crc kubenswrapper[4869]: I1001 15:21:10.093608 4869 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b61db8eb-aaa5-4c45-be80-3dfe4f0bb400-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 15:21:10 crc kubenswrapper[4869]: I1001 15:21:10.093642 4869 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b61db8eb-aaa5-4c45-be80-3dfe4f0bb400-scripts\") on node \"crc\" DevicePath \"\"" Oct 01 15:21:10 crc kubenswrapper[4869]: I1001 15:21:10.093652 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-46zzn\" (UniqueName: \"kubernetes.io/projected/b61db8eb-aaa5-4c45-be80-3dfe4f0bb400-kube-api-access-46zzn\") on node \"crc\" DevicePath \"\"" Oct 01 15:21:10 crc kubenswrapper[4869]: I1001 15:21:10.093661 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b61db8eb-aaa5-4c45-be80-3dfe4f0bb400-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 15:21:10 crc kubenswrapper[4869]: I1001 15:21:10.093671 4869 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b61db8eb-aaa5-4c45-be80-3dfe4f0bb400-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 01 15:21:10 crc kubenswrapper[4869]: I1001 15:21:10.093678 4869 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b61db8eb-aaa5-4c45-be80-3dfe4f0bb400-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 01 15:21:10 crc kubenswrapper[4869]: I1001 15:21:10.093686 4869 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b61db8eb-aaa5-4c45-be80-3dfe4f0bb400-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 01 15:21:10 crc kubenswrapper[4869]: I1001 15:21:10.162189 4869 scope.go:117] "RemoveContainer" containerID="de7f0ec71b128cb279d2720132058684ed96f82e4205a77482b9d17464293023" Oct 01 15:21:10 crc kubenswrapper[4869]: I1001 15:21:10.179861 4869 scope.go:117] "RemoveContainer" containerID="a39bc17b3a138d037d4e3d8be2c869793101d1c95ddd4eaa83bd6adae35149ea" Oct 01 15:21:10 crc kubenswrapper[4869]: I1001 15:21:10.197599 4869 scope.go:117] "RemoveContainer" containerID="4acb6afc8d2a9020861ab74506a0e83cf0cd0f1434ce3b95efd7935d07636d30" Oct 01 15:21:10 crc kubenswrapper[4869]: I1001 15:21:10.216001 4869 scope.go:117] "RemoveContainer" containerID="07fb2ccf54a8593c2defebf0b186423b15cfe2eb2fbd21b0f024ea6775c9f66a" Oct 01 15:21:10 crc 
kubenswrapper[4869]: E1001 15:21:10.216402 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"07fb2ccf54a8593c2defebf0b186423b15cfe2eb2fbd21b0f024ea6775c9f66a\": container with ID starting with 07fb2ccf54a8593c2defebf0b186423b15cfe2eb2fbd21b0f024ea6775c9f66a not found: ID does not exist" containerID="07fb2ccf54a8593c2defebf0b186423b15cfe2eb2fbd21b0f024ea6775c9f66a" Oct 01 15:21:10 crc kubenswrapper[4869]: I1001 15:21:10.216447 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"07fb2ccf54a8593c2defebf0b186423b15cfe2eb2fbd21b0f024ea6775c9f66a"} err="failed to get container status \"07fb2ccf54a8593c2defebf0b186423b15cfe2eb2fbd21b0f024ea6775c9f66a\": rpc error: code = NotFound desc = could not find container \"07fb2ccf54a8593c2defebf0b186423b15cfe2eb2fbd21b0f024ea6775c9f66a\": container with ID starting with 07fb2ccf54a8593c2defebf0b186423b15cfe2eb2fbd21b0f024ea6775c9f66a not found: ID does not exist" Oct 01 15:21:10 crc kubenswrapper[4869]: I1001 15:21:10.216473 4869 scope.go:117] "RemoveContainer" containerID="de7f0ec71b128cb279d2720132058684ed96f82e4205a77482b9d17464293023" Oct 01 15:21:10 crc kubenswrapper[4869]: E1001 15:21:10.217110 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"de7f0ec71b128cb279d2720132058684ed96f82e4205a77482b9d17464293023\": container with ID starting with de7f0ec71b128cb279d2720132058684ed96f82e4205a77482b9d17464293023 not found: ID does not exist" containerID="de7f0ec71b128cb279d2720132058684ed96f82e4205a77482b9d17464293023" Oct 01 15:21:10 crc kubenswrapper[4869]: I1001 15:21:10.217139 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"de7f0ec71b128cb279d2720132058684ed96f82e4205a77482b9d17464293023"} err="failed to get container status \"de7f0ec71b128cb279d2720132058684ed96f82e4205a77482b9d17464293023\": rpc error: code = NotFound desc = could not find container \"de7f0ec71b128cb279d2720132058684ed96f82e4205a77482b9d17464293023\": container with ID starting with de7f0ec71b128cb279d2720132058684ed96f82e4205a77482b9d17464293023 not found: ID does not exist" Oct 01 15:21:10 crc kubenswrapper[4869]: I1001 15:21:10.217155 4869 scope.go:117] "RemoveContainer" containerID="a39bc17b3a138d037d4e3d8be2c869793101d1c95ddd4eaa83bd6adae35149ea" Oct 01 15:21:10 crc kubenswrapper[4869]: E1001 15:21:10.217482 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a39bc17b3a138d037d4e3d8be2c869793101d1c95ddd4eaa83bd6adae35149ea\": container with ID starting with a39bc17b3a138d037d4e3d8be2c869793101d1c95ddd4eaa83bd6adae35149ea not found: ID does not exist" containerID="a39bc17b3a138d037d4e3d8be2c869793101d1c95ddd4eaa83bd6adae35149ea" Oct 01 15:21:10 crc kubenswrapper[4869]: I1001 15:21:10.217520 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a39bc17b3a138d037d4e3d8be2c869793101d1c95ddd4eaa83bd6adae35149ea"} err="failed to get container status \"a39bc17b3a138d037d4e3d8be2c869793101d1c95ddd4eaa83bd6adae35149ea\": rpc error: code = NotFound desc = could not find container \"a39bc17b3a138d037d4e3d8be2c869793101d1c95ddd4eaa83bd6adae35149ea\": container with ID starting with a39bc17b3a138d037d4e3d8be2c869793101d1c95ddd4eaa83bd6adae35149ea not found: ID does not exist" Oct 01 15:21:10 crc kubenswrapper[4869]: 
I1001 15:21:10.217548 4869 scope.go:117] "RemoveContainer" containerID="4acb6afc8d2a9020861ab74506a0e83cf0cd0f1434ce3b95efd7935d07636d30" Oct 01 15:21:10 crc kubenswrapper[4869]: E1001 15:21:10.217832 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4acb6afc8d2a9020861ab74506a0e83cf0cd0f1434ce3b95efd7935d07636d30\": container with ID starting with 4acb6afc8d2a9020861ab74506a0e83cf0cd0f1434ce3b95efd7935d07636d30 not found: ID does not exist" containerID="4acb6afc8d2a9020861ab74506a0e83cf0cd0f1434ce3b95efd7935d07636d30" Oct 01 15:21:10 crc kubenswrapper[4869]: I1001 15:21:10.217864 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4acb6afc8d2a9020861ab74506a0e83cf0cd0f1434ce3b95efd7935d07636d30"} err="failed to get container status \"4acb6afc8d2a9020861ab74506a0e83cf0cd0f1434ce3b95efd7935d07636d30\": rpc error: code = NotFound desc = could not find container \"4acb6afc8d2a9020861ab74506a0e83cf0cd0f1434ce3b95efd7935d07636d30\": container with ID starting with 4acb6afc8d2a9020861ab74506a0e83cf0cd0f1434ce3b95efd7935d07636d30 not found: ID does not exist" Oct 01 15:21:10 crc kubenswrapper[4869]: I1001 15:21:10.217883 4869 scope.go:117] "RemoveContainer" containerID="07fb2ccf54a8593c2defebf0b186423b15cfe2eb2fbd21b0f024ea6775c9f66a" Oct 01 15:21:10 crc kubenswrapper[4869]: I1001 15:21:10.218097 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"07fb2ccf54a8593c2defebf0b186423b15cfe2eb2fbd21b0f024ea6775c9f66a"} err="failed to get container status \"07fb2ccf54a8593c2defebf0b186423b15cfe2eb2fbd21b0f024ea6775c9f66a\": rpc error: code = NotFound desc = could not find container \"07fb2ccf54a8593c2defebf0b186423b15cfe2eb2fbd21b0f024ea6775c9f66a\": container with ID starting with 07fb2ccf54a8593c2defebf0b186423b15cfe2eb2fbd21b0f024ea6775c9f66a not found: ID does not exist" Oct 01 15:21:10 crc kubenswrapper[4869]: I1001 15:21:10.218118 4869 scope.go:117] "RemoveContainer" containerID="de7f0ec71b128cb279d2720132058684ed96f82e4205a77482b9d17464293023" Oct 01 15:21:10 crc kubenswrapper[4869]: I1001 15:21:10.218321 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"de7f0ec71b128cb279d2720132058684ed96f82e4205a77482b9d17464293023"} err="failed to get container status \"de7f0ec71b128cb279d2720132058684ed96f82e4205a77482b9d17464293023\": rpc error: code = NotFound desc = could not find container \"de7f0ec71b128cb279d2720132058684ed96f82e4205a77482b9d17464293023\": container with ID starting with de7f0ec71b128cb279d2720132058684ed96f82e4205a77482b9d17464293023 not found: ID does not exist" Oct 01 15:21:10 crc kubenswrapper[4869]: I1001 15:21:10.218343 4869 scope.go:117] "RemoveContainer" containerID="a39bc17b3a138d037d4e3d8be2c869793101d1c95ddd4eaa83bd6adae35149ea" Oct 01 15:21:10 crc kubenswrapper[4869]: I1001 15:21:10.218551 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a39bc17b3a138d037d4e3d8be2c869793101d1c95ddd4eaa83bd6adae35149ea"} err="failed to get container status \"a39bc17b3a138d037d4e3d8be2c869793101d1c95ddd4eaa83bd6adae35149ea\": rpc error: code = NotFound desc = could not find container \"a39bc17b3a138d037d4e3d8be2c869793101d1c95ddd4eaa83bd6adae35149ea\": container with ID starting with a39bc17b3a138d037d4e3d8be2c869793101d1c95ddd4eaa83bd6adae35149ea not found: ID does not exist" Oct 01 15:21:10 crc kubenswrapper[4869]: 
I1001 15:21:10.218575 4869 scope.go:117] "RemoveContainer" containerID="4acb6afc8d2a9020861ab74506a0e83cf0cd0f1434ce3b95efd7935d07636d30" Oct 01 15:21:10 crc kubenswrapper[4869]: I1001 15:21:10.218752 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4acb6afc8d2a9020861ab74506a0e83cf0cd0f1434ce3b95efd7935d07636d30"} err="failed to get container status \"4acb6afc8d2a9020861ab74506a0e83cf0cd0f1434ce3b95efd7935d07636d30\": rpc error: code = NotFound desc = could not find container \"4acb6afc8d2a9020861ab74506a0e83cf0cd0f1434ce3b95efd7935d07636d30\": container with ID starting with 4acb6afc8d2a9020861ab74506a0e83cf0cd0f1434ce3b95efd7935d07636d30 not found: ID does not exist" Oct 01 15:21:10 crc kubenswrapper[4869]: I1001 15:21:10.218776 4869 scope.go:117] "RemoveContainer" containerID="07fb2ccf54a8593c2defebf0b186423b15cfe2eb2fbd21b0f024ea6775c9f66a" Oct 01 15:21:10 crc kubenswrapper[4869]: I1001 15:21:10.219460 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"07fb2ccf54a8593c2defebf0b186423b15cfe2eb2fbd21b0f024ea6775c9f66a"} err="failed to get container status \"07fb2ccf54a8593c2defebf0b186423b15cfe2eb2fbd21b0f024ea6775c9f66a\": rpc error: code = NotFound desc = could not find container \"07fb2ccf54a8593c2defebf0b186423b15cfe2eb2fbd21b0f024ea6775c9f66a\": container with ID starting with 07fb2ccf54a8593c2defebf0b186423b15cfe2eb2fbd21b0f024ea6775c9f66a not found: ID does not exist" Oct 01 15:21:10 crc kubenswrapper[4869]: I1001 15:21:10.219484 4869 scope.go:117] "RemoveContainer" containerID="de7f0ec71b128cb279d2720132058684ed96f82e4205a77482b9d17464293023" Oct 01 15:21:10 crc kubenswrapper[4869]: I1001 15:21:10.219702 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"de7f0ec71b128cb279d2720132058684ed96f82e4205a77482b9d17464293023"} err="failed to get container status \"de7f0ec71b128cb279d2720132058684ed96f82e4205a77482b9d17464293023\": rpc error: code = NotFound desc = could not find container \"de7f0ec71b128cb279d2720132058684ed96f82e4205a77482b9d17464293023\": container with ID starting with de7f0ec71b128cb279d2720132058684ed96f82e4205a77482b9d17464293023 not found: ID does not exist" Oct 01 15:21:10 crc kubenswrapper[4869]: I1001 15:21:10.219730 4869 scope.go:117] "RemoveContainer" containerID="a39bc17b3a138d037d4e3d8be2c869793101d1c95ddd4eaa83bd6adae35149ea" Oct 01 15:21:10 crc kubenswrapper[4869]: I1001 15:21:10.220072 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a39bc17b3a138d037d4e3d8be2c869793101d1c95ddd4eaa83bd6adae35149ea"} err="failed to get container status \"a39bc17b3a138d037d4e3d8be2c869793101d1c95ddd4eaa83bd6adae35149ea\": rpc error: code = NotFound desc = could not find container \"a39bc17b3a138d037d4e3d8be2c869793101d1c95ddd4eaa83bd6adae35149ea\": container with ID starting with a39bc17b3a138d037d4e3d8be2c869793101d1c95ddd4eaa83bd6adae35149ea not found: ID does not exist" Oct 01 15:21:10 crc kubenswrapper[4869]: I1001 15:21:10.220101 4869 scope.go:117] "RemoveContainer" containerID="4acb6afc8d2a9020861ab74506a0e83cf0cd0f1434ce3b95efd7935d07636d30" Oct 01 15:21:10 crc kubenswrapper[4869]: I1001 15:21:10.220599 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4acb6afc8d2a9020861ab74506a0e83cf0cd0f1434ce3b95efd7935d07636d30"} err="failed to get container status 
\"4acb6afc8d2a9020861ab74506a0e83cf0cd0f1434ce3b95efd7935d07636d30\": rpc error: code = NotFound desc = could not find container \"4acb6afc8d2a9020861ab74506a0e83cf0cd0f1434ce3b95efd7935d07636d30\": container with ID starting with 4acb6afc8d2a9020861ab74506a0e83cf0cd0f1434ce3b95efd7935d07636d30 not found: ID does not exist" Oct 01 15:21:10 crc kubenswrapper[4869]: I1001 15:21:10.220644 4869 scope.go:117] "RemoveContainer" containerID="07fb2ccf54a8593c2defebf0b186423b15cfe2eb2fbd21b0f024ea6775c9f66a" Oct 01 15:21:10 crc kubenswrapper[4869]: I1001 15:21:10.221022 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"07fb2ccf54a8593c2defebf0b186423b15cfe2eb2fbd21b0f024ea6775c9f66a"} err="failed to get container status \"07fb2ccf54a8593c2defebf0b186423b15cfe2eb2fbd21b0f024ea6775c9f66a\": rpc error: code = NotFound desc = could not find container \"07fb2ccf54a8593c2defebf0b186423b15cfe2eb2fbd21b0f024ea6775c9f66a\": container with ID starting with 07fb2ccf54a8593c2defebf0b186423b15cfe2eb2fbd21b0f024ea6775c9f66a not found: ID does not exist" Oct 01 15:21:10 crc kubenswrapper[4869]: I1001 15:21:10.221065 4869 scope.go:117] "RemoveContainer" containerID="de7f0ec71b128cb279d2720132058684ed96f82e4205a77482b9d17464293023" Oct 01 15:21:10 crc kubenswrapper[4869]: I1001 15:21:10.222157 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"de7f0ec71b128cb279d2720132058684ed96f82e4205a77482b9d17464293023"} err="failed to get container status \"de7f0ec71b128cb279d2720132058684ed96f82e4205a77482b9d17464293023\": rpc error: code = NotFound desc = could not find container \"de7f0ec71b128cb279d2720132058684ed96f82e4205a77482b9d17464293023\": container with ID starting with de7f0ec71b128cb279d2720132058684ed96f82e4205a77482b9d17464293023 not found: ID does not exist" Oct 01 15:21:10 crc kubenswrapper[4869]: I1001 15:21:10.222206 4869 scope.go:117] "RemoveContainer" containerID="a39bc17b3a138d037d4e3d8be2c869793101d1c95ddd4eaa83bd6adae35149ea" Oct 01 15:21:10 crc kubenswrapper[4869]: I1001 15:21:10.222483 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a39bc17b3a138d037d4e3d8be2c869793101d1c95ddd4eaa83bd6adae35149ea"} err="failed to get container status \"a39bc17b3a138d037d4e3d8be2c869793101d1c95ddd4eaa83bd6adae35149ea\": rpc error: code = NotFound desc = could not find container \"a39bc17b3a138d037d4e3d8be2c869793101d1c95ddd4eaa83bd6adae35149ea\": container with ID starting with a39bc17b3a138d037d4e3d8be2c869793101d1c95ddd4eaa83bd6adae35149ea not found: ID does not exist" Oct 01 15:21:10 crc kubenswrapper[4869]: I1001 15:21:10.222506 4869 scope.go:117] "RemoveContainer" containerID="4acb6afc8d2a9020861ab74506a0e83cf0cd0f1434ce3b95efd7935d07636d30" Oct 01 15:21:10 crc kubenswrapper[4869]: I1001 15:21:10.222904 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4acb6afc8d2a9020861ab74506a0e83cf0cd0f1434ce3b95efd7935d07636d30"} err="failed to get container status \"4acb6afc8d2a9020861ab74506a0e83cf0cd0f1434ce3b95efd7935d07636d30\": rpc error: code = NotFound desc = could not find container \"4acb6afc8d2a9020861ab74506a0e83cf0cd0f1434ce3b95efd7935d07636d30\": container with ID starting with 4acb6afc8d2a9020861ab74506a0e83cf0cd0f1434ce3b95efd7935d07636d30 not found: ID does not exist" Oct 01 15:21:10 crc kubenswrapper[4869]: I1001 15:21:10.440984 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/ceilometer-0"] Oct 01 15:21:10 crc kubenswrapper[4869]: I1001 15:21:10.446524 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 01 15:21:10 crc kubenswrapper[4869]: I1001 15:21:10.455731 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 01 15:21:10 crc kubenswrapper[4869]: E1001 15:21:10.456271 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b61db8eb-aaa5-4c45-be80-3dfe4f0bb400" containerName="proxy-httpd" Oct 01 15:21:10 crc kubenswrapper[4869]: I1001 15:21:10.456284 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="b61db8eb-aaa5-4c45-be80-3dfe4f0bb400" containerName="proxy-httpd" Oct 01 15:21:10 crc kubenswrapper[4869]: E1001 15:21:10.456298 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b61db8eb-aaa5-4c45-be80-3dfe4f0bb400" containerName="sg-core" Oct 01 15:21:10 crc kubenswrapper[4869]: I1001 15:21:10.456304 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="b61db8eb-aaa5-4c45-be80-3dfe4f0bb400" containerName="sg-core" Oct 01 15:21:10 crc kubenswrapper[4869]: E1001 15:21:10.456324 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b61db8eb-aaa5-4c45-be80-3dfe4f0bb400" containerName="ceilometer-notification-agent" Oct 01 15:21:10 crc kubenswrapper[4869]: I1001 15:21:10.456331 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="b61db8eb-aaa5-4c45-be80-3dfe4f0bb400" containerName="ceilometer-notification-agent" Oct 01 15:21:10 crc kubenswrapper[4869]: E1001 15:21:10.456339 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b61db8eb-aaa5-4c45-be80-3dfe4f0bb400" containerName="ceilometer-central-agent" Oct 01 15:21:10 crc kubenswrapper[4869]: I1001 15:21:10.456344 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="b61db8eb-aaa5-4c45-be80-3dfe4f0bb400" containerName="ceilometer-central-agent" Oct 01 15:21:10 crc kubenswrapper[4869]: I1001 15:21:10.456498 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="b61db8eb-aaa5-4c45-be80-3dfe4f0bb400" containerName="proxy-httpd" Oct 01 15:21:10 crc kubenswrapper[4869]: I1001 15:21:10.456521 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="b61db8eb-aaa5-4c45-be80-3dfe4f0bb400" containerName="sg-core" Oct 01 15:21:10 crc kubenswrapper[4869]: I1001 15:21:10.456536 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="b61db8eb-aaa5-4c45-be80-3dfe4f0bb400" containerName="ceilometer-notification-agent" Oct 01 15:21:10 crc kubenswrapper[4869]: I1001 15:21:10.456546 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="b61db8eb-aaa5-4c45-be80-3dfe4f0bb400" containerName="ceilometer-central-agent" Oct 01 15:21:10 crc kubenswrapper[4869]: I1001 15:21:10.458468 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 01 15:21:10 crc kubenswrapper[4869]: I1001 15:21:10.460958 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 01 15:21:10 crc kubenswrapper[4869]: I1001 15:21:10.461067 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 01 15:21:10 crc kubenswrapper[4869]: I1001 15:21:10.476494 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 01 15:21:10 crc kubenswrapper[4869]: I1001 15:21:10.621161 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6743ef9f-f686-4a93-aa19-0f152e9f5438-log-httpd\") pod \"ceilometer-0\" (UID: \"6743ef9f-f686-4a93-aa19-0f152e9f5438\") " pod="openstack/ceilometer-0" Oct 01 15:21:10 crc kubenswrapper[4869]: I1001 15:21:10.621208 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6743ef9f-f686-4a93-aa19-0f152e9f5438-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"6743ef9f-f686-4a93-aa19-0f152e9f5438\") " pod="openstack/ceilometer-0" Oct 01 15:21:10 crc kubenswrapper[4869]: I1001 15:21:10.621227 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6743ef9f-f686-4a93-aa19-0f152e9f5438-scripts\") pod \"ceilometer-0\" (UID: \"6743ef9f-f686-4a93-aa19-0f152e9f5438\") " pod="openstack/ceilometer-0" Oct 01 15:21:10 crc kubenswrapper[4869]: I1001 15:21:10.621295 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qspt7\" (UniqueName: \"kubernetes.io/projected/6743ef9f-f686-4a93-aa19-0f152e9f5438-kube-api-access-qspt7\") pod \"ceilometer-0\" (UID: \"6743ef9f-f686-4a93-aa19-0f152e9f5438\") " pod="openstack/ceilometer-0" Oct 01 15:21:10 crc kubenswrapper[4869]: I1001 15:21:10.621326 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6743ef9f-f686-4a93-aa19-0f152e9f5438-run-httpd\") pod \"ceilometer-0\" (UID: \"6743ef9f-f686-4a93-aa19-0f152e9f5438\") " pod="openstack/ceilometer-0" Oct 01 15:21:10 crc kubenswrapper[4869]: I1001 15:21:10.622107 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6743ef9f-f686-4a93-aa19-0f152e9f5438-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"6743ef9f-f686-4a93-aa19-0f152e9f5438\") " pod="openstack/ceilometer-0" Oct 01 15:21:10 crc kubenswrapper[4869]: I1001 15:21:10.622153 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6743ef9f-f686-4a93-aa19-0f152e9f5438-config-data\") pod \"ceilometer-0\" (UID: \"6743ef9f-f686-4a93-aa19-0f152e9f5438\") " pod="openstack/ceilometer-0" Oct 01 15:21:10 crc kubenswrapper[4869]: I1001 15:21:10.723553 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6743ef9f-f686-4a93-aa19-0f152e9f5438-config-data\") pod \"ceilometer-0\" (UID: \"6743ef9f-f686-4a93-aa19-0f152e9f5438\") " pod="openstack/ceilometer-0" Oct 01 15:21:10 crc kubenswrapper[4869]: I1001 15:21:10.723945 
4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6743ef9f-f686-4a93-aa19-0f152e9f5438-log-httpd\") pod \"ceilometer-0\" (UID: \"6743ef9f-f686-4a93-aa19-0f152e9f5438\") " pod="openstack/ceilometer-0" Oct 01 15:21:10 crc kubenswrapper[4869]: I1001 15:21:10.723969 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6743ef9f-f686-4a93-aa19-0f152e9f5438-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"6743ef9f-f686-4a93-aa19-0f152e9f5438\") " pod="openstack/ceilometer-0" Oct 01 15:21:10 crc kubenswrapper[4869]: I1001 15:21:10.724027 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6743ef9f-f686-4a93-aa19-0f152e9f5438-scripts\") pod \"ceilometer-0\" (UID: \"6743ef9f-f686-4a93-aa19-0f152e9f5438\") " pod="openstack/ceilometer-0" Oct 01 15:21:10 crc kubenswrapper[4869]: I1001 15:21:10.724067 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qspt7\" (UniqueName: \"kubernetes.io/projected/6743ef9f-f686-4a93-aa19-0f152e9f5438-kube-api-access-qspt7\") pod \"ceilometer-0\" (UID: \"6743ef9f-f686-4a93-aa19-0f152e9f5438\") " pod="openstack/ceilometer-0" Oct 01 15:21:10 crc kubenswrapper[4869]: I1001 15:21:10.724107 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6743ef9f-f686-4a93-aa19-0f152e9f5438-run-httpd\") pod \"ceilometer-0\" (UID: \"6743ef9f-f686-4a93-aa19-0f152e9f5438\") " pod="openstack/ceilometer-0" Oct 01 15:21:10 crc kubenswrapper[4869]: I1001 15:21:10.724151 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6743ef9f-f686-4a93-aa19-0f152e9f5438-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"6743ef9f-f686-4a93-aa19-0f152e9f5438\") " pod="openstack/ceilometer-0" Oct 01 15:21:10 crc kubenswrapper[4869]: I1001 15:21:10.726842 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6743ef9f-f686-4a93-aa19-0f152e9f5438-log-httpd\") pod \"ceilometer-0\" (UID: \"6743ef9f-f686-4a93-aa19-0f152e9f5438\") " pod="openstack/ceilometer-0" Oct 01 15:21:10 crc kubenswrapper[4869]: I1001 15:21:10.727394 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6743ef9f-f686-4a93-aa19-0f152e9f5438-run-httpd\") pod \"ceilometer-0\" (UID: \"6743ef9f-f686-4a93-aa19-0f152e9f5438\") " pod="openstack/ceilometer-0" Oct 01 15:21:10 crc kubenswrapper[4869]: I1001 15:21:10.729070 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6743ef9f-f686-4a93-aa19-0f152e9f5438-scripts\") pod \"ceilometer-0\" (UID: \"6743ef9f-f686-4a93-aa19-0f152e9f5438\") " pod="openstack/ceilometer-0" Oct 01 15:21:10 crc kubenswrapper[4869]: I1001 15:21:10.729287 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6743ef9f-f686-4a93-aa19-0f152e9f5438-config-data\") pod \"ceilometer-0\" (UID: \"6743ef9f-f686-4a93-aa19-0f152e9f5438\") " pod="openstack/ceilometer-0" Oct 01 15:21:10 crc kubenswrapper[4869]: I1001 15:21:10.731012 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6743ef9f-f686-4a93-aa19-0f152e9f5438-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"6743ef9f-f686-4a93-aa19-0f152e9f5438\") " pod="openstack/ceilometer-0" Oct 01 15:21:10 crc kubenswrapper[4869]: I1001 15:21:10.741800 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6743ef9f-f686-4a93-aa19-0f152e9f5438-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"6743ef9f-f686-4a93-aa19-0f152e9f5438\") " pod="openstack/ceilometer-0" Oct 01 15:21:10 crc kubenswrapper[4869]: I1001 15:21:10.746099 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qspt7\" (UniqueName: \"kubernetes.io/projected/6743ef9f-f686-4a93-aa19-0f152e9f5438-kube-api-access-qspt7\") pod \"ceilometer-0\" (UID: \"6743ef9f-f686-4a93-aa19-0f152e9f5438\") " pod="openstack/ceilometer-0" Oct 01 15:21:10 crc kubenswrapper[4869]: I1001 15:21:10.783517 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 01 15:21:10 crc kubenswrapper[4869]: I1001 15:21:10.820910 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-96b4fb6d7-g25hj" Oct 01 15:21:10 crc kubenswrapper[4869]: I1001 15:21:10.895014 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-6c8b49859b-vzzr7"] Oct 01 15:21:10 crc kubenswrapper[4869]: I1001 15:21:10.895419 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-6c8b49859b-vzzr7" podUID="91b925da-fe68-4787-8a77-1f49f04cd917" containerName="neutron-api" containerID="cri-o://54eeba3e2bdb10d9d0dd2c62f76c5e26affc83398074893c9ee7cf58bd30f5fe" gracePeriod=30 Oct 01 15:21:10 crc kubenswrapper[4869]: I1001 15:21:10.895559 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-6c8b49859b-vzzr7" podUID="91b925da-fe68-4787-8a77-1f49f04cd917" containerName="neutron-httpd" containerID="cri-o://ee76674fc7efa494d4f99f5af61cc48f47f33b2bb6125efa8b907f758d65fd24" gracePeriod=30 Oct 01 15:21:11 crc kubenswrapper[4869]: I1001 15:21:11.099588 4869 generic.go:334] "Generic (PLEG): container finished" podID="0ce74364-60ae-42f0-9151-265fe3a38e1a" containerID="362b0cf8b20f92cb86aa76db20dd0cdb552a66f501bff1efb744f875b3060801" exitCode=0 Oct 01 15:21:11 crc kubenswrapper[4869]: I1001 15:21:11.099736 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-3a05-account-create-q2kxl" event={"ID":"0ce74364-60ae-42f0-9151-265fe3a38e1a","Type":"ContainerDied","Data":"362b0cf8b20f92cb86aa76db20dd0cdb552a66f501bff1efb744f875b3060801"} Oct 01 15:21:11 crc kubenswrapper[4869]: I1001 15:21:11.103204 4869 generic.go:334] "Generic (PLEG): container finished" podID="91b925da-fe68-4787-8a77-1f49f04cd917" containerID="ee76674fc7efa494d4f99f5af61cc48f47f33b2bb6125efa8b907f758d65fd24" exitCode=0 Oct 01 15:21:11 crc kubenswrapper[4869]: I1001 15:21:11.103504 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6c8b49859b-vzzr7" event={"ID":"91b925da-fe68-4787-8a77-1f49f04cd917","Type":"ContainerDied","Data":"ee76674fc7efa494d4f99f5af61cc48f47f33b2bb6125efa8b907f758d65fd24"} Oct 01 15:21:11 crc kubenswrapper[4869]: I1001 15:21:11.314032 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 01 15:21:11 crc kubenswrapper[4869]: W1001 15:21:11.335939 4869 manager.go:1169] Failed to process watch event 
{EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6743ef9f_f686_4a93_aa19_0f152e9f5438.slice/crio-f1b62c2b40e028e82987ef336bb2daa3a48f079fb4924edbc1c9bb1a26d6897f WatchSource:0}: Error finding container f1b62c2b40e028e82987ef336bb2daa3a48f079fb4924edbc1c9bb1a26d6897f: Status 404 returned error can't find the container with id f1b62c2b40e028e82987ef336bb2daa3a48f079fb4924edbc1c9bb1a26d6897f Oct 01 15:21:11 crc kubenswrapper[4869]: I1001 15:21:11.422938 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-20ce-account-create-tgz6c" Oct 01 15:21:11 crc kubenswrapper[4869]: I1001 15:21:11.485991 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-44c5-account-create-8zw48" Oct 01 15:21:11 crc kubenswrapper[4869]: I1001 15:21:11.546008 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b7fcs\" (UniqueName: \"kubernetes.io/projected/709b2b83-2eba-4778-beda-8e312ef3a6d9-kube-api-access-b7fcs\") pod \"709b2b83-2eba-4778-beda-8e312ef3a6d9\" (UID: \"709b2b83-2eba-4778-beda-8e312ef3a6d9\") " Oct 01 15:21:11 crc kubenswrapper[4869]: I1001 15:21:11.551191 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/709b2b83-2eba-4778-beda-8e312ef3a6d9-kube-api-access-b7fcs" (OuterVolumeSpecName: "kube-api-access-b7fcs") pod "709b2b83-2eba-4778-beda-8e312ef3a6d9" (UID: "709b2b83-2eba-4778-beda-8e312ef3a6d9"). InnerVolumeSpecName "kube-api-access-b7fcs". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:21:11 crc kubenswrapper[4869]: I1001 15:21:11.594940 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b61db8eb-aaa5-4c45-be80-3dfe4f0bb400" path="/var/lib/kubelet/pods/b61db8eb-aaa5-4c45-be80-3dfe4f0bb400/volumes" Oct 01 15:21:11 crc kubenswrapper[4869]: I1001 15:21:11.647693 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ckr29\" (UniqueName: \"kubernetes.io/projected/f88907af-39e7-4499-9745-1fe0a8c42774-kube-api-access-ckr29\") pod \"f88907af-39e7-4499-9745-1fe0a8c42774\" (UID: \"f88907af-39e7-4499-9745-1fe0a8c42774\") " Oct 01 15:21:11 crc kubenswrapper[4869]: I1001 15:21:11.648506 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b7fcs\" (UniqueName: \"kubernetes.io/projected/709b2b83-2eba-4778-beda-8e312ef3a6d9-kube-api-access-b7fcs\") on node \"crc\" DevicePath \"\"" Oct 01 15:21:11 crc kubenswrapper[4869]: I1001 15:21:11.650779 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88907af-39e7-4499-9745-1fe0a8c42774-kube-api-access-ckr29" (OuterVolumeSpecName: "kube-api-access-ckr29") pod "f88907af-39e7-4499-9745-1fe0a8c42774" (UID: "f88907af-39e7-4499-9745-1fe0a8c42774"). InnerVolumeSpecName "kube-api-access-ckr29". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:21:11 crc kubenswrapper[4869]: I1001 15:21:11.750376 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ckr29\" (UniqueName: \"kubernetes.io/projected/f88907af-39e7-4499-9745-1fe0a8c42774-kube-api-access-ckr29\") on node \"crc\" DevicePath \"\"" Oct 01 15:21:12 crc kubenswrapper[4869]: I1001 15:21:12.113873 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-44c5-account-create-8zw48" event={"ID":"f88907af-39e7-4499-9745-1fe0a8c42774","Type":"ContainerDied","Data":"707c3ecb75770062d34e7cc40259101e1d7da524bf7a1b0c67753c1bd1b36c77"} Oct 01 15:21:12 crc kubenswrapper[4869]: I1001 15:21:12.113920 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="707c3ecb75770062d34e7cc40259101e1d7da524bf7a1b0c67753c1bd1b36c77" Oct 01 15:21:12 crc kubenswrapper[4869]: I1001 15:21:12.114034 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-44c5-account-create-8zw48" Oct 01 15:21:12 crc kubenswrapper[4869]: I1001 15:21:12.115420 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-20ce-account-create-tgz6c" event={"ID":"709b2b83-2eba-4778-beda-8e312ef3a6d9","Type":"ContainerDied","Data":"91a560112cd1763da8c005be7c2a920b31bb72231a64c7d86f15e0ee32a70beb"} Oct 01 15:21:12 crc kubenswrapper[4869]: I1001 15:21:12.115511 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="91a560112cd1763da8c005be7c2a920b31bb72231a64c7d86f15e0ee32a70beb" Oct 01 15:21:12 crc kubenswrapper[4869]: I1001 15:21:12.115452 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-20ce-account-create-tgz6c" Oct 01 15:21:12 crc kubenswrapper[4869]: I1001 15:21:12.116913 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6743ef9f-f686-4a93-aa19-0f152e9f5438","Type":"ContainerStarted","Data":"ce4e48eac67200d3bab3758da0d8a9ab2bcf3d5792bf091b67e79dfbcb0a138e"} Oct 01 15:21:12 crc kubenswrapper[4869]: I1001 15:21:12.116952 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6743ef9f-f686-4a93-aa19-0f152e9f5438","Type":"ContainerStarted","Data":"f1b62c2b40e028e82987ef336bb2daa3a48f079fb4924edbc1c9bb1a26d6897f"} Oct 01 15:21:12 crc kubenswrapper[4869]: I1001 15:21:12.433322 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-3a05-account-create-q2kxl" Oct 01 15:21:12 crc kubenswrapper[4869]: I1001 15:21:12.562657 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hz2d9\" (UniqueName: \"kubernetes.io/projected/0ce74364-60ae-42f0-9151-265fe3a38e1a-kube-api-access-hz2d9\") pod \"0ce74364-60ae-42f0-9151-265fe3a38e1a\" (UID: \"0ce74364-60ae-42f0-9151-265fe3a38e1a\") " Oct 01 15:21:12 crc kubenswrapper[4869]: I1001 15:21:12.571480 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0ce74364-60ae-42f0-9151-265fe3a38e1a-kube-api-access-hz2d9" (OuterVolumeSpecName: "kube-api-access-hz2d9") pod "0ce74364-60ae-42f0-9151-265fe3a38e1a" (UID: "0ce74364-60ae-42f0-9151-265fe3a38e1a"). InnerVolumeSpecName "kube-api-access-hz2d9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:21:12 crc kubenswrapper[4869]: I1001 15:21:12.664888 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hz2d9\" (UniqueName: \"kubernetes.io/projected/0ce74364-60ae-42f0-9151-265fe3a38e1a-kube-api-access-hz2d9\") on node \"crc\" DevicePath \"\"" Oct 01 15:21:13 crc kubenswrapper[4869]: I1001 15:21:13.126736 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-3a05-account-create-q2kxl" event={"ID":"0ce74364-60ae-42f0-9151-265fe3a38e1a","Type":"ContainerDied","Data":"46d848047f023b85a7b94d2ebac3db25a1ea10aa17496905888ca0f91ce7ba0d"} Oct 01 15:21:13 crc kubenswrapper[4869]: I1001 15:21:13.127002 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="46d848047f023b85a7b94d2ebac3db25a1ea10aa17496905888ca0f91ce7ba0d" Oct 01 15:21:13 crc kubenswrapper[4869]: I1001 15:21:13.126824 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-3a05-account-create-q2kxl" Oct 01 15:21:13 crc kubenswrapper[4869]: I1001 15:21:13.128961 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6743ef9f-f686-4a93-aa19-0f152e9f5438","Type":"ContainerStarted","Data":"210a2e09562c12e7f45f4db21c831e715e43df270aa41828287b1f5a128158c1"} Oct 01 15:21:14 crc kubenswrapper[4869]: I1001 15:21:14.043633 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-5766b74c9d-wpxpf" Oct 01 15:21:14 crc kubenswrapper[4869]: I1001 15:21:14.138908 4869 generic.go:334] "Generic (PLEG): container finished" podID="011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf" containerID="848fa104b788b0a643bef9f8bc271d7b678ee782e58d1385dc06d84691994654" exitCode=137 Oct 01 15:21:14 crc kubenswrapper[4869]: I1001 15:21:14.138971 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-5766b74c9d-wpxpf" Oct 01 15:21:14 crc kubenswrapper[4869]: I1001 15:21:14.138986 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5766b74c9d-wpxpf" event={"ID":"011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf","Type":"ContainerDied","Data":"848fa104b788b0a643bef9f8bc271d7b678ee782e58d1385dc06d84691994654"} Oct 01 15:21:14 crc kubenswrapper[4869]: I1001 15:21:14.139030 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5766b74c9d-wpxpf" event={"ID":"011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf","Type":"ContainerDied","Data":"c1f6b4084c9e3c40d69d22c80de2e3b38bb0088828476cf3ff96e1c75d405d0e"} Oct 01 15:21:14 crc kubenswrapper[4869]: I1001 15:21:14.139047 4869 scope.go:117] "RemoveContainer" containerID="26d24bc7aa47d955a5fcbe67784e963e6b5f386b86c229c48e9a56720466c88e" Oct 01 15:21:14 crc kubenswrapper[4869]: I1001 15:21:14.141702 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6743ef9f-f686-4a93-aa19-0f152e9f5438","Type":"ContainerStarted","Data":"4cd148349badea52f7b074e61d6f4c67c579b5f10c598c3d3aa0c594c76dd0f9"} Oct 01 15:21:14 crc kubenswrapper[4869]: I1001 15:21:14.193803 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf-horizon-secret-key\") pod \"011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf\" (UID: \"011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf\") " Oct 01 15:21:14 crc kubenswrapper[4869]: I1001 15:21:14.194160 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf-combined-ca-bundle\") pod \"011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf\" (UID: \"011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf\") " Oct 01 15:21:14 crc kubenswrapper[4869]: I1001 15:21:14.194839 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pq6dl\" (UniqueName: \"kubernetes.io/projected/011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf-kube-api-access-pq6dl\") pod \"011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf\" (UID: \"011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf\") " Oct 01 15:21:14 crc kubenswrapper[4869]: I1001 15:21:14.195001 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf-scripts\") pod \"011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf\" (UID: \"011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf\") " Oct 01 15:21:14 crc kubenswrapper[4869]: I1001 15:21:14.195079 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf-logs\") pod \"011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf\" (UID: \"011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf\") " Oct 01 15:21:14 crc kubenswrapper[4869]: I1001 15:21:14.195198 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf-config-data\") pod \"011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf\" (UID: \"011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf\") " Oct 01 15:21:14 crc kubenswrapper[4869]: I1001 15:21:14.195334 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf-horizon-tls-certs\") pod 
\"011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf\" (UID: \"011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf\") " Oct 01 15:21:14 crc kubenswrapper[4869]: I1001 15:21:14.196247 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf-logs" (OuterVolumeSpecName: "logs") pod "011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf" (UID: "011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 15:21:14 crc kubenswrapper[4869]: I1001 15:21:14.216440 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf" (UID: "011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:21:14 crc kubenswrapper[4869]: I1001 15:21:14.216721 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf-kube-api-access-pq6dl" (OuterVolumeSpecName: "kube-api-access-pq6dl") pod "011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf" (UID: "011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf"). InnerVolumeSpecName "kube-api-access-pq6dl". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:21:14 crc kubenswrapper[4869]: I1001 15:21:14.226418 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf-scripts" (OuterVolumeSpecName: "scripts") pod "011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf" (UID: "011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:21:14 crc kubenswrapper[4869]: I1001 15:21:14.228210 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf-config-data" (OuterVolumeSpecName: "config-data") pod "011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf" (UID: "011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:21:14 crc kubenswrapper[4869]: I1001 15:21:14.237291 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf" (UID: "011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:21:14 crc kubenswrapper[4869]: I1001 15:21:14.282226 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf-horizon-tls-certs" (OuterVolumeSpecName: "horizon-tls-certs") pod "011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf" (UID: "011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf"). InnerVolumeSpecName "horizon-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:21:14 crc kubenswrapper[4869]: I1001 15:21:14.297367 4869 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Oct 01 15:21:14 crc kubenswrapper[4869]: I1001 15:21:14.297399 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 15:21:14 crc kubenswrapper[4869]: I1001 15:21:14.297411 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pq6dl\" (UniqueName: \"kubernetes.io/projected/011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf-kube-api-access-pq6dl\") on node \"crc\" DevicePath \"\"" Oct 01 15:21:14 crc kubenswrapper[4869]: I1001 15:21:14.297422 4869 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf-scripts\") on node \"crc\" DevicePath \"\"" Oct 01 15:21:14 crc kubenswrapper[4869]: I1001 15:21:14.297430 4869 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf-logs\") on node \"crc\" DevicePath \"\"" Oct 01 15:21:14 crc kubenswrapper[4869]: I1001 15:21:14.297438 4869 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 15:21:14 crc kubenswrapper[4869]: I1001 15:21:14.297448 4869 reconciler_common.go:293] "Volume detached for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf-horizon-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 01 15:21:14 crc kubenswrapper[4869]: I1001 15:21:14.301323 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-vs5p9"] Oct 01 15:21:14 crc kubenswrapper[4869]: E1001 15:21:14.301658 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf" containerName="horizon" Oct 01 15:21:14 crc kubenswrapper[4869]: I1001 15:21:14.301675 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf" containerName="horizon" Oct 01 15:21:14 crc kubenswrapper[4869]: E1001 15:21:14.301695 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf" containerName="horizon-log" Oct 01 15:21:14 crc kubenswrapper[4869]: I1001 15:21:14.301702 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf" containerName="horizon-log" Oct 01 15:21:14 crc kubenswrapper[4869]: E1001 15:21:14.301709 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f88907af-39e7-4499-9745-1fe0a8c42774" containerName="mariadb-account-create" Oct 01 15:21:14 crc kubenswrapper[4869]: I1001 15:21:14.301715 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="f88907af-39e7-4499-9745-1fe0a8c42774" containerName="mariadb-account-create" Oct 01 15:21:14 crc kubenswrapper[4869]: E1001 15:21:14.301734 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="709b2b83-2eba-4778-beda-8e312ef3a6d9" containerName="mariadb-account-create" Oct 01 15:21:14 crc kubenswrapper[4869]: I1001 15:21:14.301740 4869 
state_mem.go:107] "Deleted CPUSet assignment" podUID="709b2b83-2eba-4778-beda-8e312ef3a6d9" containerName="mariadb-account-create" Oct 01 15:21:14 crc kubenswrapper[4869]: E1001 15:21:14.301751 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0ce74364-60ae-42f0-9151-265fe3a38e1a" containerName="mariadb-account-create" Oct 01 15:21:14 crc kubenswrapper[4869]: I1001 15:21:14.301759 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="0ce74364-60ae-42f0-9151-265fe3a38e1a" containerName="mariadb-account-create" Oct 01 15:21:14 crc kubenswrapper[4869]: I1001 15:21:14.301918 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="0ce74364-60ae-42f0-9151-265fe3a38e1a" containerName="mariadb-account-create" Oct 01 15:21:14 crc kubenswrapper[4869]: I1001 15:21:14.301928 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf" containerName="horizon-log" Oct 01 15:21:14 crc kubenswrapper[4869]: I1001 15:21:14.301941 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="f88907af-39e7-4499-9745-1fe0a8c42774" containerName="mariadb-account-create" Oct 01 15:21:14 crc kubenswrapper[4869]: I1001 15:21:14.301951 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf" containerName="horizon" Oct 01 15:21:14 crc kubenswrapper[4869]: I1001 15:21:14.301957 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="709b2b83-2eba-4778-beda-8e312ef3a6d9" containerName="mariadb-account-create" Oct 01 15:21:14 crc kubenswrapper[4869]: I1001 15:21:14.302493 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-vs5p9" Oct 01 15:21:14 crc kubenswrapper[4869]: I1001 15:21:14.305810 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Oct 01 15:21:14 crc kubenswrapper[4869]: I1001 15:21:14.306051 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts" Oct 01 15:21:14 crc kubenswrapper[4869]: I1001 15:21:14.309826 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-5mgh7" Oct 01 15:21:14 crc kubenswrapper[4869]: I1001 15:21:14.317067 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-vs5p9"] Oct 01 15:21:14 crc kubenswrapper[4869]: I1001 15:21:14.355027 4869 scope.go:117] "RemoveContainer" containerID="848fa104b788b0a643bef9f8bc271d7b678ee782e58d1385dc06d84691994654" Oct 01 15:21:14 crc kubenswrapper[4869]: I1001 15:21:14.369475 4869 scope.go:117] "RemoveContainer" containerID="26d24bc7aa47d955a5fcbe67784e963e6b5f386b86c229c48e9a56720466c88e" Oct 01 15:21:14 crc kubenswrapper[4869]: E1001 15:21:14.369856 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"26d24bc7aa47d955a5fcbe67784e963e6b5f386b86c229c48e9a56720466c88e\": container with ID starting with 26d24bc7aa47d955a5fcbe67784e963e6b5f386b86c229c48e9a56720466c88e not found: ID does not exist" containerID="26d24bc7aa47d955a5fcbe67784e963e6b5f386b86c229c48e9a56720466c88e" Oct 01 15:21:14 crc kubenswrapper[4869]: I1001 15:21:14.369896 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"26d24bc7aa47d955a5fcbe67784e963e6b5f386b86c229c48e9a56720466c88e"} err="failed to get container status 
\"26d24bc7aa47d955a5fcbe67784e963e6b5f386b86c229c48e9a56720466c88e\": rpc error: code = NotFound desc = could not find container \"26d24bc7aa47d955a5fcbe67784e963e6b5f386b86c229c48e9a56720466c88e\": container with ID starting with 26d24bc7aa47d955a5fcbe67784e963e6b5f386b86c229c48e9a56720466c88e not found: ID does not exist" Oct 01 15:21:14 crc kubenswrapper[4869]: I1001 15:21:14.369921 4869 scope.go:117] "RemoveContainer" containerID="848fa104b788b0a643bef9f8bc271d7b678ee782e58d1385dc06d84691994654" Oct 01 15:21:14 crc kubenswrapper[4869]: E1001 15:21:14.370138 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"848fa104b788b0a643bef9f8bc271d7b678ee782e58d1385dc06d84691994654\": container with ID starting with 848fa104b788b0a643bef9f8bc271d7b678ee782e58d1385dc06d84691994654 not found: ID does not exist" containerID="848fa104b788b0a643bef9f8bc271d7b678ee782e58d1385dc06d84691994654" Oct 01 15:21:14 crc kubenswrapper[4869]: I1001 15:21:14.370165 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"848fa104b788b0a643bef9f8bc271d7b678ee782e58d1385dc06d84691994654"} err="failed to get container status \"848fa104b788b0a643bef9f8bc271d7b678ee782e58d1385dc06d84691994654\": rpc error: code = NotFound desc = could not find container \"848fa104b788b0a643bef9f8bc271d7b678ee782e58d1385dc06d84691994654\": container with ID starting with 848fa104b788b0a643bef9f8bc271d7b678ee782e58d1385dc06d84691994654 not found: ID does not exist" Oct 01 15:21:14 crc kubenswrapper[4869]: I1001 15:21:14.398867 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s4b8z\" (UniqueName: \"kubernetes.io/projected/5d10a15f-ad57-459e-90e7-9dbe3d0033a4-kube-api-access-s4b8z\") pod \"nova-cell0-conductor-db-sync-vs5p9\" (UID: \"5d10a15f-ad57-459e-90e7-9dbe3d0033a4\") " pod="openstack/nova-cell0-conductor-db-sync-vs5p9" Oct 01 15:21:14 crc kubenswrapper[4869]: I1001 15:21:14.398979 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5d10a15f-ad57-459e-90e7-9dbe3d0033a4-config-data\") pod \"nova-cell0-conductor-db-sync-vs5p9\" (UID: \"5d10a15f-ad57-459e-90e7-9dbe3d0033a4\") " pod="openstack/nova-cell0-conductor-db-sync-vs5p9" Oct 01 15:21:14 crc kubenswrapper[4869]: I1001 15:21:14.399064 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d10a15f-ad57-459e-90e7-9dbe3d0033a4-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-vs5p9\" (UID: \"5d10a15f-ad57-459e-90e7-9dbe3d0033a4\") " pod="openstack/nova-cell0-conductor-db-sync-vs5p9" Oct 01 15:21:14 crc kubenswrapper[4869]: I1001 15:21:14.399091 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5d10a15f-ad57-459e-90e7-9dbe3d0033a4-scripts\") pod \"nova-cell0-conductor-db-sync-vs5p9\" (UID: \"5d10a15f-ad57-459e-90e7-9dbe3d0033a4\") " pod="openstack/nova-cell0-conductor-db-sync-vs5p9" Oct 01 15:21:14 crc kubenswrapper[4869]: I1001 15:21:14.476094 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-5766b74c9d-wpxpf"] Oct 01 15:21:14 crc kubenswrapper[4869]: I1001 15:21:14.480955 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openstack/horizon-5766b74c9d-wpxpf"] Oct 01 15:21:14 crc kubenswrapper[4869]: I1001 15:21:14.500401 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5d10a15f-ad57-459e-90e7-9dbe3d0033a4-config-data\") pod \"nova-cell0-conductor-db-sync-vs5p9\" (UID: \"5d10a15f-ad57-459e-90e7-9dbe3d0033a4\") " pod="openstack/nova-cell0-conductor-db-sync-vs5p9" Oct 01 15:21:14 crc kubenswrapper[4869]: I1001 15:21:14.500485 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d10a15f-ad57-459e-90e7-9dbe3d0033a4-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-vs5p9\" (UID: \"5d10a15f-ad57-459e-90e7-9dbe3d0033a4\") " pod="openstack/nova-cell0-conductor-db-sync-vs5p9" Oct 01 15:21:14 crc kubenswrapper[4869]: I1001 15:21:14.500507 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5d10a15f-ad57-459e-90e7-9dbe3d0033a4-scripts\") pod \"nova-cell0-conductor-db-sync-vs5p9\" (UID: \"5d10a15f-ad57-459e-90e7-9dbe3d0033a4\") " pod="openstack/nova-cell0-conductor-db-sync-vs5p9" Oct 01 15:21:14 crc kubenswrapper[4869]: I1001 15:21:14.500601 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s4b8z\" (UniqueName: \"kubernetes.io/projected/5d10a15f-ad57-459e-90e7-9dbe3d0033a4-kube-api-access-s4b8z\") pod \"nova-cell0-conductor-db-sync-vs5p9\" (UID: \"5d10a15f-ad57-459e-90e7-9dbe3d0033a4\") " pod="openstack/nova-cell0-conductor-db-sync-vs5p9" Oct 01 15:21:14 crc kubenswrapper[4869]: I1001 15:21:14.504777 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d10a15f-ad57-459e-90e7-9dbe3d0033a4-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-vs5p9\" (UID: \"5d10a15f-ad57-459e-90e7-9dbe3d0033a4\") " pod="openstack/nova-cell0-conductor-db-sync-vs5p9" Oct 01 15:21:14 crc kubenswrapper[4869]: I1001 15:21:14.504861 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5d10a15f-ad57-459e-90e7-9dbe3d0033a4-config-data\") pod \"nova-cell0-conductor-db-sync-vs5p9\" (UID: \"5d10a15f-ad57-459e-90e7-9dbe3d0033a4\") " pod="openstack/nova-cell0-conductor-db-sync-vs5p9" Oct 01 15:21:14 crc kubenswrapper[4869]: I1001 15:21:14.506476 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5d10a15f-ad57-459e-90e7-9dbe3d0033a4-scripts\") pod \"nova-cell0-conductor-db-sync-vs5p9\" (UID: \"5d10a15f-ad57-459e-90e7-9dbe3d0033a4\") " pod="openstack/nova-cell0-conductor-db-sync-vs5p9" Oct 01 15:21:14 crc kubenswrapper[4869]: I1001 15:21:14.519158 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s4b8z\" (UniqueName: \"kubernetes.io/projected/5d10a15f-ad57-459e-90e7-9dbe3d0033a4-kube-api-access-s4b8z\") pod \"nova-cell0-conductor-db-sync-vs5p9\" (UID: \"5d10a15f-ad57-459e-90e7-9dbe3d0033a4\") " pod="openstack/nova-cell0-conductor-db-sync-vs5p9" Oct 01 15:21:14 crc kubenswrapper[4869]: I1001 15:21:14.653331 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-vs5p9" Oct 01 15:21:15 crc kubenswrapper[4869]: W1001 15:21:15.106033 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5d10a15f_ad57_459e_90e7_9dbe3d0033a4.slice/crio-cd9bc2e51593ee86994e5e58b7188b31d145aab8308d2e7fc9751d3574eb9205 WatchSource:0}: Error finding container cd9bc2e51593ee86994e5e58b7188b31d145aab8308d2e7fc9751d3574eb9205: Status 404 returned error can't find the container with id cd9bc2e51593ee86994e5e58b7188b31d145aab8308d2e7fc9751d3574eb9205 Oct 01 15:21:15 crc kubenswrapper[4869]: I1001 15:21:15.131345 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-vs5p9"] Oct 01 15:21:15 crc kubenswrapper[4869]: I1001 15:21:15.151759 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-vs5p9" event={"ID":"5d10a15f-ad57-459e-90e7-9dbe3d0033a4","Type":"ContainerStarted","Data":"cd9bc2e51593ee86994e5e58b7188b31d145aab8308d2e7fc9751d3574eb9205"} Oct 01 15:21:15 crc kubenswrapper[4869]: I1001 15:21:15.158080 4869 generic.go:334] "Generic (PLEG): container finished" podID="91b925da-fe68-4787-8a77-1f49f04cd917" containerID="54eeba3e2bdb10d9d0dd2c62f76c5e26affc83398074893c9ee7cf58bd30f5fe" exitCode=0 Oct 01 15:21:15 crc kubenswrapper[4869]: I1001 15:21:15.158392 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6c8b49859b-vzzr7" event={"ID":"91b925da-fe68-4787-8a77-1f49f04cd917","Type":"ContainerDied","Data":"54eeba3e2bdb10d9d0dd2c62f76c5e26affc83398074893c9ee7cf58bd30f5fe"} Oct 01 15:21:15 crc kubenswrapper[4869]: I1001 15:21:15.330057 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-6c8b49859b-vzzr7" Oct 01 15:21:15 crc kubenswrapper[4869]: I1001 15:21:15.415284 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/91b925da-fe68-4787-8a77-1f49f04cd917-ovndb-tls-certs\") pod \"91b925da-fe68-4787-8a77-1f49f04cd917\" (UID: \"91b925da-fe68-4787-8a77-1f49f04cd917\") " Oct 01 15:21:15 crc kubenswrapper[4869]: I1001 15:21:15.415378 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91b925da-fe68-4787-8a77-1f49f04cd917-combined-ca-bundle\") pod \"91b925da-fe68-4787-8a77-1f49f04cd917\" (UID: \"91b925da-fe68-4787-8a77-1f49f04cd917\") " Oct 01 15:21:15 crc kubenswrapper[4869]: I1001 15:21:15.415398 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/91b925da-fe68-4787-8a77-1f49f04cd917-config\") pod \"91b925da-fe68-4787-8a77-1f49f04cd917\" (UID: \"91b925da-fe68-4787-8a77-1f49f04cd917\") " Oct 01 15:21:15 crc kubenswrapper[4869]: I1001 15:21:15.415471 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k6xrh\" (UniqueName: \"kubernetes.io/projected/91b925da-fe68-4787-8a77-1f49f04cd917-kube-api-access-k6xrh\") pod \"91b925da-fe68-4787-8a77-1f49f04cd917\" (UID: \"91b925da-fe68-4787-8a77-1f49f04cd917\") " Oct 01 15:21:15 crc kubenswrapper[4869]: I1001 15:21:15.415524 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/91b925da-fe68-4787-8a77-1f49f04cd917-httpd-config\") pod \"91b925da-fe68-4787-8a77-1f49f04cd917\" (UID: \"91b925da-fe68-4787-8a77-1f49f04cd917\") " Oct 01 15:21:15 crc kubenswrapper[4869]: I1001 15:21:15.420320 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/91b925da-fe68-4787-8a77-1f49f04cd917-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "91b925da-fe68-4787-8a77-1f49f04cd917" (UID: "91b925da-fe68-4787-8a77-1f49f04cd917"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:21:15 crc kubenswrapper[4869]: I1001 15:21:15.420764 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/91b925da-fe68-4787-8a77-1f49f04cd917-kube-api-access-k6xrh" (OuterVolumeSpecName: "kube-api-access-k6xrh") pod "91b925da-fe68-4787-8a77-1f49f04cd917" (UID: "91b925da-fe68-4787-8a77-1f49f04cd917"). InnerVolumeSpecName "kube-api-access-k6xrh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:21:15 crc kubenswrapper[4869]: I1001 15:21:15.464043 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/91b925da-fe68-4787-8a77-1f49f04cd917-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "91b925da-fe68-4787-8a77-1f49f04cd917" (UID: "91b925da-fe68-4787-8a77-1f49f04cd917"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:21:15 crc kubenswrapper[4869]: I1001 15:21:15.470426 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/91b925da-fe68-4787-8a77-1f49f04cd917-config" (OuterVolumeSpecName: "config") pod "91b925da-fe68-4787-8a77-1f49f04cd917" (UID: "91b925da-fe68-4787-8a77-1f49f04cd917"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:21:15 crc kubenswrapper[4869]: I1001 15:21:15.517106 4869 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/91b925da-fe68-4787-8a77-1f49f04cd917-httpd-config\") on node \"crc\" DevicePath \"\"" Oct 01 15:21:15 crc kubenswrapper[4869]: I1001 15:21:15.517142 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91b925da-fe68-4787-8a77-1f49f04cd917-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 15:21:15 crc kubenswrapper[4869]: I1001 15:21:15.517154 4869 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/91b925da-fe68-4787-8a77-1f49f04cd917-config\") on node \"crc\" DevicePath \"\"" Oct 01 15:21:15 crc kubenswrapper[4869]: I1001 15:21:15.517165 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k6xrh\" (UniqueName: \"kubernetes.io/projected/91b925da-fe68-4787-8a77-1f49f04cd917-kube-api-access-k6xrh\") on node \"crc\" DevicePath \"\"" Oct 01 15:21:15 crc kubenswrapper[4869]: I1001 15:21:15.538408 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/91b925da-fe68-4787-8a77-1f49f04cd917-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "91b925da-fe68-4787-8a77-1f49f04cd917" (UID: "91b925da-fe68-4787-8a77-1f49f04cd917"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:21:15 crc kubenswrapper[4869]: I1001 15:21:15.590225 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf" path="/var/lib/kubelet/pods/011bdc54-d9ef-4ae9-a16b-5eaf4e97b2cf/volumes" Oct 01 15:21:15 crc kubenswrapper[4869]: I1001 15:21:15.618840 4869 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/91b925da-fe68-4787-8a77-1f49f04cd917-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 01 15:21:16 crc kubenswrapper[4869]: I1001 15:21:16.184887 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6c8b49859b-vzzr7" event={"ID":"91b925da-fe68-4787-8a77-1f49f04cd917","Type":"ContainerDied","Data":"3268cdb704aa8139554398ad54303af6a62c92b9bcd503a021deb1fc49fed0bb"} Oct 01 15:21:16 crc kubenswrapper[4869]: I1001 15:21:16.185284 4869 scope.go:117] "RemoveContainer" containerID="ee76674fc7efa494d4f99f5af61cc48f47f33b2bb6125efa8b907f758d65fd24" Oct 01 15:21:16 crc kubenswrapper[4869]: I1001 15:21:16.185179 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-6c8b49859b-vzzr7" Oct 01 15:21:16 crc kubenswrapper[4869]: I1001 15:21:16.214624 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-6c8b49859b-vzzr7"] Oct 01 15:21:16 crc kubenswrapper[4869]: I1001 15:21:16.221104 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-6c8b49859b-vzzr7"] Oct 01 15:21:16 crc kubenswrapper[4869]: I1001 15:21:16.225478 4869 scope.go:117] "RemoveContainer" containerID="54eeba3e2bdb10d9d0dd2c62f76c5e26affc83398074893c9ee7cf58bd30f5fe" Oct 01 15:21:17 crc kubenswrapper[4869]: I1001 15:21:17.198915 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6743ef9f-f686-4a93-aa19-0f152e9f5438","Type":"ContainerStarted","Data":"c4ed3678f33506cdd169476adca164c5c1297f26624c755f14ffac480ba1a29a"} Oct 01 15:21:17 crc kubenswrapper[4869]: I1001 15:21:17.199240 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 01 15:21:17 crc kubenswrapper[4869]: I1001 15:21:17.225147 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.830417841 podStartE2EDuration="7.225125585s" podCreationTimestamp="2025-10-01 15:21:10 +0000 UTC" firstStartedPulling="2025-10-01 15:21:11.340171269 +0000 UTC m=+980.487014385" lastFinishedPulling="2025-10-01 15:21:16.734879013 +0000 UTC m=+985.881722129" observedRunningTime="2025-10-01 15:21:17.216365134 +0000 UTC m=+986.363208250" watchObservedRunningTime="2025-10-01 15:21:17.225125585 +0000 UTC m=+986.371968701" Oct 01 15:21:17 crc kubenswrapper[4869]: I1001 15:21:17.593631 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="91b925da-fe68-4787-8a77-1f49f04cd917" path="/var/lib/kubelet/pods/91b925da-fe68-4787-8a77-1f49f04cd917/volumes" Oct 01 15:21:18 crc kubenswrapper[4869]: I1001 15:21:18.970944 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 01 15:21:19 crc kubenswrapper[4869]: I1001 15:21:19.220781 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="6743ef9f-f686-4a93-aa19-0f152e9f5438" containerName="proxy-httpd" containerID="cri-o://c4ed3678f33506cdd169476adca164c5c1297f26624c755f14ffac480ba1a29a" gracePeriod=30 Oct 01 15:21:19 crc kubenswrapper[4869]: I1001 15:21:19.220753 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="6743ef9f-f686-4a93-aa19-0f152e9f5438" containerName="ceilometer-central-agent" containerID="cri-o://ce4e48eac67200d3bab3758da0d8a9ab2bcf3d5792bf091b67e79dfbcb0a138e" gracePeriod=30 Oct 01 15:21:19 crc kubenswrapper[4869]: I1001 15:21:19.220792 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="6743ef9f-f686-4a93-aa19-0f152e9f5438" containerName="sg-core" containerID="cri-o://4cd148349badea52f7b074e61d6f4c67c579b5f10c598c3d3aa0c594c76dd0f9" gracePeriod=30 Oct 01 15:21:19 crc kubenswrapper[4869]: I1001 15:21:19.220881 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="6743ef9f-f686-4a93-aa19-0f152e9f5438" containerName="ceilometer-notification-agent" containerID="cri-o://210a2e09562c12e7f45f4db21c831e715e43df270aa41828287b1f5a128158c1" gracePeriod=30 Oct 01 15:21:20 crc kubenswrapper[4869]: I1001 15:21:20.232711 4869 generic.go:334] "Generic (PLEG): container finished" 
podID="6743ef9f-f686-4a93-aa19-0f152e9f5438" containerID="c4ed3678f33506cdd169476adca164c5c1297f26624c755f14ffac480ba1a29a" exitCode=0 Oct 01 15:21:20 crc kubenswrapper[4869]: I1001 15:21:20.233146 4869 generic.go:334] "Generic (PLEG): container finished" podID="6743ef9f-f686-4a93-aa19-0f152e9f5438" containerID="4cd148349badea52f7b074e61d6f4c67c579b5f10c598c3d3aa0c594c76dd0f9" exitCode=2 Oct 01 15:21:20 crc kubenswrapper[4869]: I1001 15:21:20.233161 4869 generic.go:334] "Generic (PLEG): container finished" podID="6743ef9f-f686-4a93-aa19-0f152e9f5438" containerID="210a2e09562c12e7f45f4db21c831e715e43df270aa41828287b1f5a128158c1" exitCode=0 Oct 01 15:21:20 crc kubenswrapper[4869]: I1001 15:21:20.233170 4869 generic.go:334] "Generic (PLEG): container finished" podID="6743ef9f-f686-4a93-aa19-0f152e9f5438" containerID="ce4e48eac67200d3bab3758da0d8a9ab2bcf3d5792bf091b67e79dfbcb0a138e" exitCode=0 Oct 01 15:21:20 crc kubenswrapper[4869]: I1001 15:21:20.233190 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6743ef9f-f686-4a93-aa19-0f152e9f5438","Type":"ContainerDied","Data":"c4ed3678f33506cdd169476adca164c5c1297f26624c755f14ffac480ba1a29a"} Oct 01 15:21:20 crc kubenswrapper[4869]: I1001 15:21:20.233215 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6743ef9f-f686-4a93-aa19-0f152e9f5438","Type":"ContainerDied","Data":"4cd148349badea52f7b074e61d6f4c67c579b5f10c598c3d3aa0c594c76dd0f9"} Oct 01 15:21:20 crc kubenswrapper[4869]: I1001 15:21:20.233225 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6743ef9f-f686-4a93-aa19-0f152e9f5438","Type":"ContainerDied","Data":"210a2e09562c12e7f45f4db21c831e715e43df270aa41828287b1f5a128158c1"} Oct 01 15:21:20 crc kubenswrapper[4869]: I1001 15:21:20.233234 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6743ef9f-f686-4a93-aa19-0f152e9f5438","Type":"ContainerDied","Data":"ce4e48eac67200d3bab3758da0d8a9ab2bcf3d5792bf091b67e79dfbcb0a138e"} Oct 01 15:21:22 crc kubenswrapper[4869]: I1001 15:21:22.614557 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 01 15:21:22 crc kubenswrapper[4869]: I1001 15:21:22.753744 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6743ef9f-f686-4a93-aa19-0f152e9f5438-config-data\") pod \"6743ef9f-f686-4a93-aa19-0f152e9f5438\" (UID: \"6743ef9f-f686-4a93-aa19-0f152e9f5438\") " Oct 01 15:21:22 crc kubenswrapper[4869]: I1001 15:21:22.753792 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6743ef9f-f686-4a93-aa19-0f152e9f5438-log-httpd\") pod \"6743ef9f-f686-4a93-aa19-0f152e9f5438\" (UID: \"6743ef9f-f686-4a93-aa19-0f152e9f5438\") " Oct 01 15:21:22 crc kubenswrapper[4869]: I1001 15:21:22.753825 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qspt7\" (UniqueName: \"kubernetes.io/projected/6743ef9f-f686-4a93-aa19-0f152e9f5438-kube-api-access-qspt7\") pod \"6743ef9f-f686-4a93-aa19-0f152e9f5438\" (UID: \"6743ef9f-f686-4a93-aa19-0f152e9f5438\") " Oct 01 15:21:22 crc kubenswrapper[4869]: I1001 15:21:22.753895 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6743ef9f-f686-4a93-aa19-0f152e9f5438-sg-core-conf-yaml\") pod \"6743ef9f-f686-4a93-aa19-0f152e9f5438\" (UID: \"6743ef9f-f686-4a93-aa19-0f152e9f5438\") " Oct 01 15:21:22 crc kubenswrapper[4869]: I1001 15:21:22.753920 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6743ef9f-f686-4a93-aa19-0f152e9f5438-run-httpd\") pod \"6743ef9f-f686-4a93-aa19-0f152e9f5438\" (UID: \"6743ef9f-f686-4a93-aa19-0f152e9f5438\") " Oct 01 15:21:22 crc kubenswrapper[4869]: I1001 15:21:22.754035 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6743ef9f-f686-4a93-aa19-0f152e9f5438-combined-ca-bundle\") pod \"6743ef9f-f686-4a93-aa19-0f152e9f5438\" (UID: \"6743ef9f-f686-4a93-aa19-0f152e9f5438\") " Oct 01 15:21:22 crc kubenswrapper[4869]: I1001 15:21:22.754053 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6743ef9f-f686-4a93-aa19-0f152e9f5438-scripts\") pod \"6743ef9f-f686-4a93-aa19-0f152e9f5438\" (UID: \"6743ef9f-f686-4a93-aa19-0f152e9f5438\") " Oct 01 15:21:22 crc kubenswrapper[4869]: I1001 15:21:22.754634 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6743ef9f-f686-4a93-aa19-0f152e9f5438-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "6743ef9f-f686-4a93-aa19-0f152e9f5438" (UID: "6743ef9f-f686-4a93-aa19-0f152e9f5438"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 15:21:22 crc kubenswrapper[4869]: I1001 15:21:22.754863 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6743ef9f-f686-4a93-aa19-0f152e9f5438-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "6743ef9f-f686-4a93-aa19-0f152e9f5438" (UID: "6743ef9f-f686-4a93-aa19-0f152e9f5438"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 15:21:22 crc kubenswrapper[4869]: I1001 15:21:22.755212 4869 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6743ef9f-f686-4a93-aa19-0f152e9f5438-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 01 15:21:22 crc kubenswrapper[4869]: I1001 15:21:22.755233 4869 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6743ef9f-f686-4a93-aa19-0f152e9f5438-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 01 15:21:22 crc kubenswrapper[4869]: I1001 15:21:22.771714 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6743ef9f-f686-4a93-aa19-0f152e9f5438-kube-api-access-qspt7" (OuterVolumeSpecName: "kube-api-access-qspt7") pod "6743ef9f-f686-4a93-aa19-0f152e9f5438" (UID: "6743ef9f-f686-4a93-aa19-0f152e9f5438"). InnerVolumeSpecName "kube-api-access-qspt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:21:22 crc kubenswrapper[4869]: I1001 15:21:22.771806 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6743ef9f-f686-4a93-aa19-0f152e9f5438-scripts" (OuterVolumeSpecName: "scripts") pod "6743ef9f-f686-4a93-aa19-0f152e9f5438" (UID: "6743ef9f-f686-4a93-aa19-0f152e9f5438"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:21:22 crc kubenswrapper[4869]: I1001 15:21:22.777836 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6743ef9f-f686-4a93-aa19-0f152e9f5438-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "6743ef9f-f686-4a93-aa19-0f152e9f5438" (UID: "6743ef9f-f686-4a93-aa19-0f152e9f5438"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:21:22 crc kubenswrapper[4869]: I1001 15:21:22.825903 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6743ef9f-f686-4a93-aa19-0f152e9f5438-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6743ef9f-f686-4a93-aa19-0f152e9f5438" (UID: "6743ef9f-f686-4a93-aa19-0f152e9f5438"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:21:22 crc kubenswrapper[4869]: I1001 15:21:22.857045 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qspt7\" (UniqueName: \"kubernetes.io/projected/6743ef9f-f686-4a93-aa19-0f152e9f5438-kube-api-access-qspt7\") on node \"crc\" DevicePath \"\"" Oct 01 15:21:22 crc kubenswrapper[4869]: I1001 15:21:22.857074 4869 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6743ef9f-f686-4a93-aa19-0f152e9f5438-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 01 15:21:22 crc kubenswrapper[4869]: I1001 15:21:22.857083 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6743ef9f-f686-4a93-aa19-0f152e9f5438-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 15:21:22 crc kubenswrapper[4869]: I1001 15:21:22.857094 4869 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6743ef9f-f686-4a93-aa19-0f152e9f5438-scripts\") on node \"crc\" DevicePath \"\"" Oct 01 15:21:22 crc kubenswrapper[4869]: I1001 15:21:22.857190 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6743ef9f-f686-4a93-aa19-0f152e9f5438-config-data" (OuterVolumeSpecName: "config-data") pod "6743ef9f-f686-4a93-aa19-0f152e9f5438" (UID: "6743ef9f-f686-4a93-aa19-0f152e9f5438"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:21:22 crc kubenswrapper[4869]: I1001 15:21:22.958904 4869 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6743ef9f-f686-4a93-aa19-0f152e9f5438-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 15:21:23 crc kubenswrapper[4869]: I1001 15:21:23.268626 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6743ef9f-f686-4a93-aa19-0f152e9f5438","Type":"ContainerDied","Data":"f1b62c2b40e028e82987ef336bb2daa3a48f079fb4924edbc1c9bb1a26d6897f"} Oct 01 15:21:23 crc kubenswrapper[4869]: I1001 15:21:23.268668 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 01 15:21:23 crc kubenswrapper[4869]: I1001 15:21:23.268697 4869 scope.go:117] "RemoveContainer" containerID="c4ed3678f33506cdd169476adca164c5c1297f26624c755f14ffac480ba1a29a" Oct 01 15:21:23 crc kubenswrapper[4869]: I1001 15:21:23.272191 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-vs5p9" event={"ID":"5d10a15f-ad57-459e-90e7-9dbe3d0033a4","Type":"ContainerStarted","Data":"e033c41ddad75eec30a51c662b6b6647edceda69c44ec10eac99c04612c42039"} Oct 01 15:21:23 crc kubenswrapper[4869]: I1001 15:21:23.331796 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-vs5p9" podStartSLOduration=2.054507038 podStartE2EDuration="9.33177794s" podCreationTimestamp="2025-10-01 15:21:14 +0000 UTC" firstStartedPulling="2025-10-01 15:21:15.112417894 +0000 UTC m=+984.259261050" lastFinishedPulling="2025-10-01 15:21:22.389688836 +0000 UTC m=+991.536531952" observedRunningTime="2025-10-01 15:21:23.302523951 +0000 UTC m=+992.449367117" watchObservedRunningTime="2025-10-01 15:21:23.33177794 +0000 UTC m=+992.478621066" Oct 01 15:21:23 crc kubenswrapper[4869]: I1001 15:21:23.335483 4869 scope.go:117] "RemoveContainer" containerID="4cd148349badea52f7b074e61d6f4c67c579b5f10c598c3d3aa0c594c76dd0f9" Oct 01 15:21:23 crc kubenswrapper[4869]: I1001 15:21:23.344838 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 01 15:21:23 crc kubenswrapper[4869]: I1001 15:21:23.351389 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 01 15:21:23 crc kubenswrapper[4869]: I1001 15:21:23.358475 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 01 15:21:23 crc kubenswrapper[4869]: E1001 15:21:23.358892 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="91b925da-fe68-4787-8a77-1f49f04cd917" containerName="neutron-httpd" Oct 01 15:21:23 crc kubenswrapper[4869]: I1001 15:21:23.358908 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="91b925da-fe68-4787-8a77-1f49f04cd917" containerName="neutron-httpd" Oct 01 15:21:23 crc kubenswrapper[4869]: E1001 15:21:23.358921 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="91b925da-fe68-4787-8a77-1f49f04cd917" containerName="neutron-api" Oct 01 15:21:23 crc kubenswrapper[4869]: I1001 15:21:23.358929 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="91b925da-fe68-4787-8a77-1f49f04cd917" containerName="neutron-api" Oct 01 15:21:23 crc kubenswrapper[4869]: E1001 15:21:23.358957 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6743ef9f-f686-4a93-aa19-0f152e9f5438" containerName="proxy-httpd" Oct 01 15:21:23 crc kubenswrapper[4869]: I1001 15:21:23.358963 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="6743ef9f-f686-4a93-aa19-0f152e9f5438" containerName="proxy-httpd" Oct 01 15:21:23 crc kubenswrapper[4869]: E1001 15:21:23.358972 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6743ef9f-f686-4a93-aa19-0f152e9f5438" containerName="ceilometer-central-agent" Oct 01 15:21:23 crc kubenswrapper[4869]: I1001 15:21:23.358978 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="6743ef9f-f686-4a93-aa19-0f152e9f5438" containerName="ceilometer-central-agent" Oct 01 15:21:23 crc kubenswrapper[4869]: E1001 15:21:23.358991 4869 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="6743ef9f-f686-4a93-aa19-0f152e9f5438" containerName="ceilometer-notification-agent" Oct 01 15:21:23 crc kubenswrapper[4869]: I1001 15:21:23.358997 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="6743ef9f-f686-4a93-aa19-0f152e9f5438" containerName="ceilometer-notification-agent" Oct 01 15:21:23 crc kubenswrapper[4869]: E1001 15:21:23.359009 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6743ef9f-f686-4a93-aa19-0f152e9f5438" containerName="sg-core" Oct 01 15:21:23 crc kubenswrapper[4869]: I1001 15:21:23.359015 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="6743ef9f-f686-4a93-aa19-0f152e9f5438" containerName="sg-core" Oct 01 15:21:23 crc kubenswrapper[4869]: I1001 15:21:23.359160 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="6743ef9f-f686-4a93-aa19-0f152e9f5438" containerName="ceilometer-notification-agent" Oct 01 15:21:23 crc kubenswrapper[4869]: I1001 15:21:23.359171 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="6743ef9f-f686-4a93-aa19-0f152e9f5438" containerName="sg-core" Oct 01 15:21:23 crc kubenswrapper[4869]: I1001 15:21:23.359181 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="91b925da-fe68-4787-8a77-1f49f04cd917" containerName="neutron-api" Oct 01 15:21:23 crc kubenswrapper[4869]: I1001 15:21:23.359190 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="6743ef9f-f686-4a93-aa19-0f152e9f5438" containerName="proxy-httpd" Oct 01 15:21:23 crc kubenswrapper[4869]: I1001 15:21:23.359197 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="6743ef9f-f686-4a93-aa19-0f152e9f5438" containerName="ceilometer-central-agent" Oct 01 15:21:23 crc kubenswrapper[4869]: I1001 15:21:23.359206 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="91b925da-fe68-4787-8a77-1f49f04cd917" containerName="neutron-httpd" Oct 01 15:21:23 crc kubenswrapper[4869]: I1001 15:21:23.360899 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 01 15:21:23 crc kubenswrapper[4869]: I1001 15:21:23.365079 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 01 15:21:23 crc kubenswrapper[4869]: I1001 15:21:23.365374 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 01 15:21:23 crc kubenswrapper[4869]: I1001 15:21:23.379115 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 01 15:21:23 crc kubenswrapper[4869]: I1001 15:21:23.392251 4869 scope.go:117] "RemoveContainer" containerID="210a2e09562c12e7f45f4db21c831e715e43df270aa41828287b1f5a128158c1" Oct 01 15:21:23 crc kubenswrapper[4869]: I1001 15:21:23.416658 4869 scope.go:117] "RemoveContainer" containerID="ce4e48eac67200d3bab3758da0d8a9ab2bcf3d5792bf091b67e79dfbcb0a138e" Oct 01 15:21:23 crc kubenswrapper[4869]: I1001 15:21:23.468287 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5ede1f51-a56a-4ac8-ba8b-6963803cb869-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"5ede1f51-a56a-4ac8-ba8b-6963803cb869\") " pod="openstack/ceilometer-0" Oct 01 15:21:23 crc kubenswrapper[4869]: I1001 15:21:23.468331 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5ede1f51-a56a-4ac8-ba8b-6963803cb869-log-httpd\") pod \"ceilometer-0\" (UID: \"5ede1f51-a56a-4ac8-ba8b-6963803cb869\") " pod="openstack/ceilometer-0" Oct 01 15:21:23 crc kubenswrapper[4869]: I1001 15:21:23.468354 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5ede1f51-a56a-4ac8-ba8b-6963803cb869-run-httpd\") pod \"ceilometer-0\" (UID: \"5ede1f51-a56a-4ac8-ba8b-6963803cb869\") " pod="openstack/ceilometer-0" Oct 01 15:21:23 crc kubenswrapper[4869]: I1001 15:21:23.468749 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bngb8\" (UniqueName: \"kubernetes.io/projected/5ede1f51-a56a-4ac8-ba8b-6963803cb869-kube-api-access-bngb8\") pod \"ceilometer-0\" (UID: \"5ede1f51-a56a-4ac8-ba8b-6963803cb869\") " pod="openstack/ceilometer-0" Oct 01 15:21:23 crc kubenswrapper[4869]: I1001 15:21:23.468829 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5ede1f51-a56a-4ac8-ba8b-6963803cb869-scripts\") pod \"ceilometer-0\" (UID: \"5ede1f51-a56a-4ac8-ba8b-6963803cb869\") " pod="openstack/ceilometer-0" Oct 01 15:21:23 crc kubenswrapper[4869]: I1001 15:21:23.468858 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5ede1f51-a56a-4ac8-ba8b-6963803cb869-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"5ede1f51-a56a-4ac8-ba8b-6963803cb869\") " pod="openstack/ceilometer-0" Oct 01 15:21:23 crc kubenswrapper[4869]: I1001 15:21:23.468945 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5ede1f51-a56a-4ac8-ba8b-6963803cb869-config-data\") pod \"ceilometer-0\" (UID: \"5ede1f51-a56a-4ac8-ba8b-6963803cb869\") " pod="openstack/ceilometer-0" Oct 01 15:21:23 crc kubenswrapper[4869]: I1001 
15:21:23.570720 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5ede1f51-a56a-4ac8-ba8b-6963803cb869-log-httpd\") pod \"ceilometer-0\" (UID: \"5ede1f51-a56a-4ac8-ba8b-6963803cb869\") " pod="openstack/ceilometer-0" Oct 01 15:21:23 crc kubenswrapper[4869]: I1001 15:21:23.570808 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5ede1f51-a56a-4ac8-ba8b-6963803cb869-run-httpd\") pod \"ceilometer-0\" (UID: \"5ede1f51-a56a-4ac8-ba8b-6963803cb869\") " pod="openstack/ceilometer-0" Oct 01 15:21:23 crc kubenswrapper[4869]: I1001 15:21:23.571041 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bngb8\" (UniqueName: \"kubernetes.io/projected/5ede1f51-a56a-4ac8-ba8b-6963803cb869-kube-api-access-bngb8\") pod \"ceilometer-0\" (UID: \"5ede1f51-a56a-4ac8-ba8b-6963803cb869\") " pod="openstack/ceilometer-0" Oct 01 15:21:23 crc kubenswrapper[4869]: I1001 15:21:23.571102 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5ede1f51-a56a-4ac8-ba8b-6963803cb869-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"5ede1f51-a56a-4ac8-ba8b-6963803cb869\") " pod="openstack/ceilometer-0" Oct 01 15:21:23 crc kubenswrapper[4869]: I1001 15:21:23.571206 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5ede1f51-a56a-4ac8-ba8b-6963803cb869-scripts\") pod \"ceilometer-0\" (UID: \"5ede1f51-a56a-4ac8-ba8b-6963803cb869\") " pod="openstack/ceilometer-0" Oct 01 15:21:23 crc kubenswrapper[4869]: I1001 15:21:23.571315 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5ede1f51-a56a-4ac8-ba8b-6963803cb869-config-data\") pod \"ceilometer-0\" (UID: \"5ede1f51-a56a-4ac8-ba8b-6963803cb869\") " pod="openstack/ceilometer-0" Oct 01 15:21:23 crc kubenswrapper[4869]: I1001 15:21:23.571409 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5ede1f51-a56a-4ac8-ba8b-6963803cb869-log-httpd\") pod \"ceilometer-0\" (UID: \"5ede1f51-a56a-4ac8-ba8b-6963803cb869\") " pod="openstack/ceilometer-0" Oct 01 15:21:23 crc kubenswrapper[4869]: I1001 15:21:23.571471 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5ede1f51-a56a-4ac8-ba8b-6963803cb869-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"5ede1f51-a56a-4ac8-ba8b-6963803cb869\") " pod="openstack/ceilometer-0" Oct 01 15:21:23 crc kubenswrapper[4869]: I1001 15:21:23.571491 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5ede1f51-a56a-4ac8-ba8b-6963803cb869-run-httpd\") pod \"ceilometer-0\" (UID: \"5ede1f51-a56a-4ac8-ba8b-6963803cb869\") " pod="openstack/ceilometer-0" Oct 01 15:21:23 crc kubenswrapper[4869]: I1001 15:21:23.576813 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5ede1f51-a56a-4ac8-ba8b-6963803cb869-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"5ede1f51-a56a-4ac8-ba8b-6963803cb869\") " pod="openstack/ceilometer-0" Oct 01 15:21:23 crc kubenswrapper[4869]: I1001 15:21:23.578759 4869 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5ede1f51-a56a-4ac8-ba8b-6963803cb869-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"5ede1f51-a56a-4ac8-ba8b-6963803cb869\") " pod="openstack/ceilometer-0" Oct 01 15:21:23 crc kubenswrapper[4869]: I1001 15:21:23.589200 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5ede1f51-a56a-4ac8-ba8b-6963803cb869-scripts\") pod \"ceilometer-0\" (UID: \"5ede1f51-a56a-4ac8-ba8b-6963803cb869\") " pod="openstack/ceilometer-0" Oct 01 15:21:23 crc kubenswrapper[4869]: I1001 15:21:23.594177 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5ede1f51-a56a-4ac8-ba8b-6963803cb869-config-data\") pod \"ceilometer-0\" (UID: \"5ede1f51-a56a-4ac8-ba8b-6963803cb869\") " pod="openstack/ceilometer-0" Oct 01 15:21:23 crc kubenswrapper[4869]: I1001 15:21:23.596205 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bngb8\" (UniqueName: \"kubernetes.io/projected/5ede1f51-a56a-4ac8-ba8b-6963803cb869-kube-api-access-bngb8\") pod \"ceilometer-0\" (UID: \"5ede1f51-a56a-4ac8-ba8b-6963803cb869\") " pod="openstack/ceilometer-0" Oct 01 15:21:23 crc kubenswrapper[4869]: I1001 15:21:23.596512 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6743ef9f-f686-4a93-aa19-0f152e9f5438" path="/var/lib/kubelet/pods/6743ef9f-f686-4a93-aa19-0f152e9f5438/volumes" Oct 01 15:21:23 crc kubenswrapper[4869]: I1001 15:21:23.698003 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 01 15:21:24 crc kubenswrapper[4869]: I1001 15:21:24.199121 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 01 15:21:24 crc kubenswrapper[4869]: I1001 15:21:24.285927 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5ede1f51-a56a-4ac8-ba8b-6963803cb869","Type":"ContainerStarted","Data":"28bb802fbec15c7baf1cc03344ed450877e1e8071106e0f5e195e01464e60b61"} Oct 01 15:21:25 crc kubenswrapper[4869]: I1001 15:21:25.297738 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5ede1f51-a56a-4ac8-ba8b-6963803cb869","Type":"ContainerStarted","Data":"ee67ced28f4ef58abd0ffbb7214b118c4b5b7ebf87b0bfa49a417b6076fe9eaa"} Oct 01 15:21:26 crc kubenswrapper[4869]: I1001 15:21:26.327507 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5ede1f51-a56a-4ac8-ba8b-6963803cb869","Type":"ContainerStarted","Data":"c5a524ecf5caf9bdd7cfd14e0c7b5533b2b4f50b3b3c624bdb38f0b60ac369c4"} Oct 01 15:21:27 crc kubenswrapper[4869]: I1001 15:21:27.348875 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5ede1f51-a56a-4ac8-ba8b-6963803cb869","Type":"ContainerStarted","Data":"2e28667845ee459879144661065b84c9ea1a9d3218470cc0665485c530f3e4c7"} Oct 01 15:21:28 crc kubenswrapper[4869]: I1001 15:21:28.363938 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5ede1f51-a56a-4ac8-ba8b-6963803cb869","Type":"ContainerStarted","Data":"9c7eaec79522726d91645e0f278f0c7e1bc6ccd4fa3ef331cc40a1c9238264b4"} Oct 01 15:21:28 crc kubenswrapper[4869]: I1001 15:21:28.365563 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 01 15:21:28 crc 
kubenswrapper[4869]: I1001 15:21:28.393426 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.77734408 podStartE2EDuration="5.39338711s" podCreationTimestamp="2025-10-01 15:21:23 +0000 UTC" firstStartedPulling="2025-10-01 15:21:24.198405569 +0000 UTC m=+993.345248685" lastFinishedPulling="2025-10-01 15:21:27.814448559 +0000 UTC m=+996.961291715" observedRunningTime="2025-10-01 15:21:28.383785398 +0000 UTC m=+997.530628534" watchObservedRunningTime="2025-10-01 15:21:28.39338711 +0000 UTC m=+997.540230226" Oct 01 15:21:33 crc kubenswrapper[4869]: I1001 15:21:33.420897 4869 generic.go:334] "Generic (PLEG): container finished" podID="5d10a15f-ad57-459e-90e7-9dbe3d0033a4" containerID="e033c41ddad75eec30a51c662b6b6647edceda69c44ec10eac99c04612c42039" exitCode=0 Oct 01 15:21:33 crc kubenswrapper[4869]: I1001 15:21:33.421008 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-vs5p9" event={"ID":"5d10a15f-ad57-459e-90e7-9dbe3d0033a4","Type":"ContainerDied","Data":"e033c41ddad75eec30a51c662b6b6647edceda69c44ec10eac99c04612c42039"} Oct 01 15:21:34 crc kubenswrapper[4869]: I1001 15:21:34.891958 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-vs5p9" Oct 01 15:21:35 crc kubenswrapper[4869]: I1001 15:21:35.021443 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5d10a15f-ad57-459e-90e7-9dbe3d0033a4-config-data\") pod \"5d10a15f-ad57-459e-90e7-9dbe3d0033a4\" (UID: \"5d10a15f-ad57-459e-90e7-9dbe3d0033a4\") " Oct 01 15:21:35 crc kubenswrapper[4869]: I1001 15:21:35.021623 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4b8z\" (UniqueName: \"kubernetes.io/projected/5d10a15f-ad57-459e-90e7-9dbe3d0033a4-kube-api-access-s4b8z\") pod \"5d10a15f-ad57-459e-90e7-9dbe3d0033a4\" (UID: \"5d10a15f-ad57-459e-90e7-9dbe3d0033a4\") " Oct 01 15:21:35 crc kubenswrapper[4869]: I1001 15:21:35.021713 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5d10a15f-ad57-459e-90e7-9dbe3d0033a4-scripts\") pod \"5d10a15f-ad57-459e-90e7-9dbe3d0033a4\" (UID: \"5d10a15f-ad57-459e-90e7-9dbe3d0033a4\") " Oct 01 15:21:35 crc kubenswrapper[4869]: I1001 15:21:35.021760 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d10a15f-ad57-459e-90e7-9dbe3d0033a4-combined-ca-bundle\") pod \"5d10a15f-ad57-459e-90e7-9dbe3d0033a4\" (UID: \"5d10a15f-ad57-459e-90e7-9dbe3d0033a4\") " Oct 01 15:21:35 crc kubenswrapper[4869]: I1001 15:21:35.028579 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5d10a15f-ad57-459e-90e7-9dbe3d0033a4-kube-api-access-s4b8z" (OuterVolumeSpecName: "kube-api-access-s4b8z") pod "5d10a15f-ad57-459e-90e7-9dbe3d0033a4" (UID: "5d10a15f-ad57-459e-90e7-9dbe3d0033a4"). InnerVolumeSpecName "kube-api-access-s4b8z". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:21:35 crc kubenswrapper[4869]: I1001 15:21:35.030361 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5d10a15f-ad57-459e-90e7-9dbe3d0033a4-scripts" (OuterVolumeSpecName: "scripts") pod "5d10a15f-ad57-459e-90e7-9dbe3d0033a4" (UID: "5d10a15f-ad57-459e-90e7-9dbe3d0033a4"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:21:35 crc kubenswrapper[4869]: I1001 15:21:35.067937 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5d10a15f-ad57-459e-90e7-9dbe3d0033a4-config-data" (OuterVolumeSpecName: "config-data") pod "5d10a15f-ad57-459e-90e7-9dbe3d0033a4" (UID: "5d10a15f-ad57-459e-90e7-9dbe3d0033a4"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:21:35 crc kubenswrapper[4869]: I1001 15:21:35.069511 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5d10a15f-ad57-459e-90e7-9dbe3d0033a4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5d10a15f-ad57-459e-90e7-9dbe3d0033a4" (UID: "5d10a15f-ad57-459e-90e7-9dbe3d0033a4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:21:35 crc kubenswrapper[4869]: I1001 15:21:35.125215 4869 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5d10a15f-ad57-459e-90e7-9dbe3d0033a4-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 15:21:35 crc kubenswrapper[4869]: I1001 15:21:35.125283 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4b8z\" (UniqueName: \"kubernetes.io/projected/5d10a15f-ad57-459e-90e7-9dbe3d0033a4-kube-api-access-s4b8z\") on node \"crc\" DevicePath \"\"" Oct 01 15:21:35 crc kubenswrapper[4869]: I1001 15:21:35.125299 4869 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5d10a15f-ad57-459e-90e7-9dbe3d0033a4-scripts\") on node \"crc\" DevicePath \"\"" Oct 01 15:21:35 crc kubenswrapper[4869]: I1001 15:21:35.125313 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d10a15f-ad57-459e-90e7-9dbe3d0033a4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 15:21:35 crc kubenswrapper[4869]: I1001 15:21:35.448396 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-vs5p9" event={"ID":"5d10a15f-ad57-459e-90e7-9dbe3d0033a4","Type":"ContainerDied","Data":"cd9bc2e51593ee86994e5e58b7188b31d145aab8308d2e7fc9751d3574eb9205"} Oct 01 15:21:35 crc kubenswrapper[4869]: I1001 15:21:35.448463 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cd9bc2e51593ee86994e5e58b7188b31d145aab8308d2e7fc9751d3574eb9205" Oct 01 15:21:35 crc kubenswrapper[4869]: I1001 15:21:35.448517 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-vs5p9" Oct 01 15:21:35 crc kubenswrapper[4869]: I1001 15:21:35.621511 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Oct 01 15:21:35 crc kubenswrapper[4869]: E1001 15:21:35.622109 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5d10a15f-ad57-459e-90e7-9dbe3d0033a4" containerName="nova-cell0-conductor-db-sync" Oct 01 15:21:35 crc kubenswrapper[4869]: I1001 15:21:35.622145 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="5d10a15f-ad57-459e-90e7-9dbe3d0033a4" containerName="nova-cell0-conductor-db-sync" Oct 01 15:21:35 crc kubenswrapper[4869]: I1001 15:21:35.622547 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="5d10a15f-ad57-459e-90e7-9dbe3d0033a4" containerName="nova-cell0-conductor-db-sync" Oct 01 15:21:35 crc kubenswrapper[4869]: I1001 15:21:35.623464 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Oct 01 15:21:35 crc kubenswrapper[4869]: I1001 15:21:35.626066 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-5mgh7" Oct 01 15:21:35 crc kubenswrapper[4869]: I1001 15:21:35.626930 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Oct 01 15:21:35 crc kubenswrapper[4869]: I1001 15:21:35.638815 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Oct 01 15:21:35 crc kubenswrapper[4869]: I1001 15:21:35.735903 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec5954ef-7c3a-46ad-b42c-f2fa802fedcf-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"ec5954ef-7c3a-46ad-b42c-f2fa802fedcf\") " pod="openstack/nova-cell0-conductor-0" Oct 01 15:21:35 crc kubenswrapper[4869]: I1001 15:21:35.736237 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7jxr6\" (UniqueName: \"kubernetes.io/projected/ec5954ef-7c3a-46ad-b42c-f2fa802fedcf-kube-api-access-7jxr6\") pod \"nova-cell0-conductor-0\" (UID: \"ec5954ef-7c3a-46ad-b42c-f2fa802fedcf\") " pod="openstack/nova-cell0-conductor-0" Oct 01 15:21:35 crc kubenswrapper[4869]: I1001 15:21:35.736330 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec5954ef-7c3a-46ad-b42c-f2fa802fedcf-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"ec5954ef-7c3a-46ad-b42c-f2fa802fedcf\") " pod="openstack/nova-cell0-conductor-0" Oct 01 15:21:35 crc kubenswrapper[4869]: I1001 15:21:35.844537 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec5954ef-7c3a-46ad-b42c-f2fa802fedcf-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"ec5954ef-7c3a-46ad-b42c-f2fa802fedcf\") " pod="openstack/nova-cell0-conductor-0" Oct 01 15:21:35 crc kubenswrapper[4869]: I1001 15:21:35.844936 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7jxr6\" (UniqueName: \"kubernetes.io/projected/ec5954ef-7c3a-46ad-b42c-f2fa802fedcf-kube-api-access-7jxr6\") pod \"nova-cell0-conductor-0\" (UID: \"ec5954ef-7c3a-46ad-b42c-f2fa802fedcf\") " pod="openstack/nova-cell0-conductor-0" Oct 01 15:21:35 crc 
kubenswrapper[4869]: I1001 15:21:35.845293 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec5954ef-7c3a-46ad-b42c-f2fa802fedcf-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"ec5954ef-7c3a-46ad-b42c-f2fa802fedcf\") " pod="openstack/nova-cell0-conductor-0" Oct 01 15:21:35 crc kubenswrapper[4869]: I1001 15:21:35.853090 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec5954ef-7c3a-46ad-b42c-f2fa802fedcf-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"ec5954ef-7c3a-46ad-b42c-f2fa802fedcf\") " pod="openstack/nova-cell0-conductor-0" Oct 01 15:21:35 crc kubenswrapper[4869]: I1001 15:21:35.853148 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec5954ef-7c3a-46ad-b42c-f2fa802fedcf-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"ec5954ef-7c3a-46ad-b42c-f2fa802fedcf\") " pod="openstack/nova-cell0-conductor-0" Oct 01 15:21:35 crc kubenswrapper[4869]: I1001 15:21:35.866250 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7jxr6\" (UniqueName: \"kubernetes.io/projected/ec5954ef-7c3a-46ad-b42c-f2fa802fedcf-kube-api-access-7jxr6\") pod \"nova-cell0-conductor-0\" (UID: \"ec5954ef-7c3a-46ad-b42c-f2fa802fedcf\") " pod="openstack/nova-cell0-conductor-0" Oct 01 15:21:35 crc kubenswrapper[4869]: I1001 15:21:35.956298 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Oct 01 15:21:36 crc kubenswrapper[4869]: I1001 15:21:36.226649 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Oct 01 15:21:36 crc kubenswrapper[4869]: I1001 15:21:36.460746 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"ec5954ef-7c3a-46ad-b42c-f2fa802fedcf","Type":"ContainerStarted","Data":"9d5f21bd2b6dc188b76fa65eb8a7bfeeb136e426495eedc720a4d598c040e360"} Oct 01 15:21:37 crc kubenswrapper[4869]: I1001 15:21:37.468591 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"ec5954ef-7c3a-46ad-b42c-f2fa802fedcf","Type":"ContainerStarted","Data":"7cd4d132a3bf38547a92a5a2f2aa530101b91ecb9822ce4bcc677718b9854669"} Oct 01 15:21:37 crc kubenswrapper[4869]: I1001 15:21:37.469091 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Oct 01 15:21:37 crc kubenswrapper[4869]: I1001 15:21:37.502716 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.502696564 podStartE2EDuration="2.502696564s" podCreationTimestamp="2025-10-01 15:21:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:21:37.486935316 +0000 UTC m=+1006.633778432" watchObservedRunningTime="2025-10-01 15:21:37.502696564 +0000 UTC m=+1006.649539690" Oct 01 15:21:45 crc kubenswrapper[4869]: I1001 15:21:45.992346 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Oct 01 15:21:46 crc kubenswrapper[4869]: I1001 15:21:46.530562 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-g8tmw"] Oct 01 15:21:46 crc kubenswrapper[4869]: I1001 15:21:46.533207 
4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-g8tmw" Oct 01 15:21:46 crc kubenswrapper[4869]: I1001 15:21:46.536137 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data" Oct 01 15:21:46 crc kubenswrapper[4869]: I1001 15:21:46.536781 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts" Oct 01 15:21:46 crc kubenswrapper[4869]: I1001 15:21:46.548313 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-g8tmw"] Oct 01 15:21:46 crc kubenswrapper[4869]: I1001 15:21:46.669361 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d7bd9313-7a67-41cd-9ab7-efd58f5ab44f-scripts\") pod \"nova-cell0-cell-mapping-g8tmw\" (UID: \"d7bd9313-7a67-41cd-9ab7-efd58f5ab44f\") " pod="openstack/nova-cell0-cell-mapping-g8tmw" Oct 01 15:21:46 crc kubenswrapper[4869]: I1001 15:21:46.669619 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7bd9313-7a67-41cd-9ab7-efd58f5ab44f-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-g8tmw\" (UID: \"d7bd9313-7a67-41cd-9ab7-efd58f5ab44f\") " pod="openstack/nova-cell0-cell-mapping-g8tmw" Oct 01 15:21:46 crc kubenswrapper[4869]: I1001 15:21:46.669902 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d7bd9313-7a67-41cd-9ab7-efd58f5ab44f-config-data\") pod \"nova-cell0-cell-mapping-g8tmw\" (UID: \"d7bd9313-7a67-41cd-9ab7-efd58f5ab44f\") " pod="openstack/nova-cell0-cell-mapping-g8tmw" Oct 01 15:21:46 crc kubenswrapper[4869]: I1001 15:21:46.670045 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8kxl7\" (UniqueName: \"kubernetes.io/projected/d7bd9313-7a67-41cd-9ab7-efd58f5ab44f-kube-api-access-8kxl7\") pod \"nova-cell0-cell-mapping-g8tmw\" (UID: \"d7bd9313-7a67-41cd-9ab7-efd58f5ab44f\") " pod="openstack/nova-cell0-cell-mapping-g8tmw" Oct 01 15:21:46 crc kubenswrapper[4869]: I1001 15:21:46.721360 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 01 15:21:46 crc kubenswrapper[4869]: I1001 15:21:46.722828 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 01 15:21:46 crc kubenswrapper[4869]: I1001 15:21:46.724861 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Oct 01 15:21:46 crc kubenswrapper[4869]: I1001 15:21:46.730416 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 01 15:21:46 crc kubenswrapper[4869]: I1001 15:21:46.771304 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8kxl7\" (UniqueName: \"kubernetes.io/projected/d7bd9313-7a67-41cd-9ab7-efd58f5ab44f-kube-api-access-8kxl7\") pod \"nova-cell0-cell-mapping-g8tmw\" (UID: \"d7bd9313-7a67-41cd-9ab7-efd58f5ab44f\") " pod="openstack/nova-cell0-cell-mapping-g8tmw" Oct 01 15:21:46 crc kubenswrapper[4869]: I1001 15:21:46.771368 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d7bd9313-7a67-41cd-9ab7-efd58f5ab44f-scripts\") pod \"nova-cell0-cell-mapping-g8tmw\" (UID: \"d7bd9313-7a67-41cd-9ab7-efd58f5ab44f\") " pod="openstack/nova-cell0-cell-mapping-g8tmw" Oct 01 15:21:46 crc kubenswrapper[4869]: I1001 15:21:46.771443 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7bd9313-7a67-41cd-9ab7-efd58f5ab44f-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-g8tmw\" (UID: \"d7bd9313-7a67-41cd-9ab7-efd58f5ab44f\") " pod="openstack/nova-cell0-cell-mapping-g8tmw" Oct 01 15:21:46 crc kubenswrapper[4869]: I1001 15:21:46.771500 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d7bd9313-7a67-41cd-9ab7-efd58f5ab44f-config-data\") pod \"nova-cell0-cell-mapping-g8tmw\" (UID: \"d7bd9313-7a67-41cd-9ab7-efd58f5ab44f\") " pod="openstack/nova-cell0-cell-mapping-g8tmw" Oct 01 15:21:46 crc kubenswrapper[4869]: I1001 15:21:46.778025 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d7bd9313-7a67-41cd-9ab7-efd58f5ab44f-scripts\") pod \"nova-cell0-cell-mapping-g8tmw\" (UID: \"d7bd9313-7a67-41cd-9ab7-efd58f5ab44f\") " pod="openstack/nova-cell0-cell-mapping-g8tmw" Oct 01 15:21:46 crc kubenswrapper[4869]: I1001 15:21:46.778614 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d7bd9313-7a67-41cd-9ab7-efd58f5ab44f-config-data\") pod \"nova-cell0-cell-mapping-g8tmw\" (UID: \"d7bd9313-7a67-41cd-9ab7-efd58f5ab44f\") " pod="openstack/nova-cell0-cell-mapping-g8tmw" Oct 01 15:21:46 crc kubenswrapper[4869]: I1001 15:21:46.780340 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7bd9313-7a67-41cd-9ab7-efd58f5ab44f-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-g8tmw\" (UID: \"d7bd9313-7a67-41cd-9ab7-efd58f5ab44f\") " pod="openstack/nova-cell0-cell-mapping-g8tmw" Oct 01 15:21:46 crc kubenswrapper[4869]: I1001 15:21:46.795434 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8kxl7\" (UniqueName: \"kubernetes.io/projected/d7bd9313-7a67-41cd-9ab7-efd58f5ab44f-kube-api-access-8kxl7\") pod \"nova-cell0-cell-mapping-g8tmw\" (UID: \"d7bd9313-7a67-41cd-9ab7-efd58f5ab44f\") " pod="openstack/nova-cell0-cell-mapping-g8tmw" Oct 01 15:21:46 crc kubenswrapper[4869]: I1001 15:21:46.809168 4869 
kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Oct 01 15:21:46 crc kubenswrapper[4869]: I1001 15:21:46.811544 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 01 15:21:46 crc kubenswrapper[4869]: I1001 15:21:46.816589 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Oct 01 15:21:46 crc kubenswrapper[4869]: I1001 15:21:46.823327 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 01 15:21:46 crc kubenswrapper[4869]: I1001 15:21:46.859048 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-g8tmw" Oct 01 15:21:46 crc kubenswrapper[4869]: I1001 15:21:46.872949 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5f6cc53d-ee0e-480c-9a9c-47085a16bf8e-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"5f6cc53d-ee0e-480c-9a9c-47085a16bf8e\") " pod="openstack/nova-cell1-novncproxy-0" Oct 01 15:21:46 crc kubenswrapper[4869]: I1001 15:21:46.872988 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pr5lh\" (UniqueName: \"kubernetes.io/projected/5f6cc53d-ee0e-480c-9a9c-47085a16bf8e-kube-api-access-pr5lh\") pod \"nova-cell1-novncproxy-0\" (UID: \"5f6cc53d-ee0e-480c-9a9c-47085a16bf8e\") " pod="openstack/nova-cell1-novncproxy-0" Oct 01 15:21:46 crc kubenswrapper[4869]: I1001 15:21:46.873050 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f6cc53d-ee0e-480c-9a9c-47085a16bf8e-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"5f6cc53d-ee0e-480c-9a9c-47085a16bf8e\") " pod="openstack/nova-cell1-novncproxy-0" Oct 01 15:21:46 crc kubenswrapper[4869]: I1001 15:21:46.905165 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Oct 01 15:21:46 crc kubenswrapper[4869]: I1001 15:21:46.906400 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 01 15:21:46 crc kubenswrapper[4869]: I1001 15:21:46.908127 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Oct 01 15:21:46 crc kubenswrapper[4869]: I1001 15:21:46.928310 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Oct 01 15:21:46 crc kubenswrapper[4869]: I1001 15:21:46.929895 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 01 15:21:46 crc kubenswrapper[4869]: I1001 15:21:46.934887 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Oct 01 15:21:46 crc kubenswrapper[4869]: I1001 15:21:46.945666 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 01 15:21:46 crc kubenswrapper[4869]: I1001 15:21:46.957893 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 01 15:21:46 crc kubenswrapper[4869]: I1001 15:21:46.974810 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x9klt\" (UniqueName: \"kubernetes.io/projected/34744dc3-77cb-4347-b1ad-fbce05019042-kube-api-access-x9klt\") pod \"nova-scheduler-0\" (UID: \"34744dc3-77cb-4347-b1ad-fbce05019042\") " pod="openstack/nova-scheduler-0" Oct 01 15:21:46 crc kubenswrapper[4869]: I1001 15:21:46.974881 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5f6cc53d-ee0e-480c-9a9c-47085a16bf8e-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"5f6cc53d-ee0e-480c-9a9c-47085a16bf8e\") " pod="openstack/nova-cell1-novncproxy-0" Oct 01 15:21:46 crc kubenswrapper[4869]: I1001 15:21:46.974912 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pr5lh\" (UniqueName: \"kubernetes.io/projected/5f6cc53d-ee0e-480c-9a9c-47085a16bf8e-kube-api-access-pr5lh\") pod \"nova-cell1-novncproxy-0\" (UID: \"5f6cc53d-ee0e-480c-9a9c-47085a16bf8e\") " pod="openstack/nova-cell1-novncproxy-0" Oct 01 15:21:46 crc kubenswrapper[4869]: I1001 15:21:46.974970 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f6cc53d-ee0e-480c-9a9c-47085a16bf8e-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"5f6cc53d-ee0e-480c-9a9c-47085a16bf8e\") " pod="openstack/nova-cell1-novncproxy-0" Oct 01 15:21:46 crc kubenswrapper[4869]: I1001 15:21:46.975010 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/34744dc3-77cb-4347-b1ad-fbce05019042-config-data\") pod \"nova-scheduler-0\" (UID: \"34744dc3-77cb-4347-b1ad-fbce05019042\") " pod="openstack/nova-scheduler-0" Oct 01 15:21:46 crc kubenswrapper[4869]: I1001 15:21:46.975042 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c2c4d71-1c1e-4627-8e29-c0ffd9862e25-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"8c2c4d71-1c1e-4627-8e29-c0ffd9862e25\") " pod="openstack/nova-api-0" Oct 01 15:21:46 crc kubenswrapper[4869]: I1001 15:21:46.975104 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8c2c4d71-1c1e-4627-8e29-c0ffd9862e25-config-data\") pod \"nova-api-0\" (UID: \"8c2c4d71-1c1e-4627-8e29-c0ffd9862e25\") " pod="openstack/nova-api-0" Oct 01 15:21:46 crc kubenswrapper[4869]: I1001 15:21:46.975127 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8c2c4d71-1c1e-4627-8e29-c0ffd9862e25-logs\") pod \"nova-api-0\" (UID: \"8c2c4d71-1c1e-4627-8e29-c0ffd9862e25\") " pod="openstack/nova-api-0" Oct 01 
15:21:46 crc kubenswrapper[4869]: I1001 15:21:46.975179 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5qbrn\" (UniqueName: \"kubernetes.io/projected/8c2c4d71-1c1e-4627-8e29-c0ffd9862e25-kube-api-access-5qbrn\") pod \"nova-api-0\" (UID: \"8c2c4d71-1c1e-4627-8e29-c0ffd9862e25\") " pod="openstack/nova-api-0" Oct 01 15:21:46 crc kubenswrapper[4869]: I1001 15:21:46.975213 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/34744dc3-77cb-4347-b1ad-fbce05019042-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"34744dc3-77cb-4347-b1ad-fbce05019042\") " pod="openstack/nova-scheduler-0" Oct 01 15:21:46 crc kubenswrapper[4869]: I1001 15:21:46.982409 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f6cc53d-ee0e-480c-9a9c-47085a16bf8e-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"5f6cc53d-ee0e-480c-9a9c-47085a16bf8e\") " pod="openstack/nova-cell1-novncproxy-0" Oct 01 15:21:47 crc kubenswrapper[4869]: I1001 15:21:46.998046 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5f6cc53d-ee0e-480c-9a9c-47085a16bf8e-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"5f6cc53d-ee0e-480c-9a9c-47085a16bf8e\") " pod="openstack/nova-cell1-novncproxy-0" Oct 01 15:21:47 crc kubenswrapper[4869]: I1001 15:21:47.024851 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pr5lh\" (UniqueName: \"kubernetes.io/projected/5f6cc53d-ee0e-480c-9a9c-47085a16bf8e-kube-api-access-pr5lh\") pod \"nova-cell1-novncproxy-0\" (UID: \"5f6cc53d-ee0e-480c-9a9c-47085a16bf8e\") " pod="openstack/nova-cell1-novncproxy-0" Oct 01 15:21:47 crc kubenswrapper[4869]: I1001 15:21:47.028879 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7c57d6658c-9jnss"] Oct 01 15:21:47 crc kubenswrapper[4869]: I1001 15:21:47.030371 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7c57d6658c-9jnss" Oct 01 15:21:47 crc kubenswrapper[4869]: I1001 15:21:47.041276 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7c57d6658c-9jnss"] Oct 01 15:21:47 crc kubenswrapper[4869]: I1001 15:21:47.044787 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 01 15:21:47 crc kubenswrapper[4869]: I1001 15:21:47.077457 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/34744dc3-77cb-4347-b1ad-fbce05019042-config-data\") pod \"nova-scheduler-0\" (UID: \"34744dc3-77cb-4347-b1ad-fbce05019042\") " pod="openstack/nova-scheduler-0" Oct 01 15:21:47 crc kubenswrapper[4869]: I1001 15:21:47.077747 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c2c4d71-1c1e-4627-8e29-c0ffd9862e25-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"8c2c4d71-1c1e-4627-8e29-c0ffd9862e25\") " pod="openstack/nova-api-0" Oct 01 15:21:47 crc kubenswrapper[4869]: I1001 15:21:47.077794 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96494ec1-01a0-40fc-96f9-d62fbcfaa5fe-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"96494ec1-01a0-40fc-96f9-d62fbcfaa5fe\") " pod="openstack/nova-metadata-0" Oct 01 15:21:47 crc kubenswrapper[4869]: I1001 15:21:47.077812 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jvjhv\" (UniqueName: \"kubernetes.io/projected/96494ec1-01a0-40fc-96f9-d62fbcfaa5fe-kube-api-access-jvjhv\") pod \"nova-metadata-0\" (UID: \"96494ec1-01a0-40fc-96f9-d62fbcfaa5fe\") " pod="openstack/nova-metadata-0" Oct 01 15:21:47 crc kubenswrapper[4869]: I1001 15:21:47.077860 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8c2c4d71-1c1e-4627-8e29-c0ffd9862e25-config-data\") pod \"nova-api-0\" (UID: \"8c2c4d71-1c1e-4627-8e29-c0ffd9862e25\") " pod="openstack/nova-api-0" Oct 01 15:21:47 crc kubenswrapper[4869]: I1001 15:21:47.077877 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8c2c4d71-1c1e-4627-8e29-c0ffd9862e25-logs\") pod \"nova-api-0\" (UID: \"8c2c4d71-1c1e-4627-8e29-c0ffd9862e25\") " pod="openstack/nova-api-0" Oct 01 15:21:47 crc kubenswrapper[4869]: I1001 15:21:47.077914 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5qbrn\" (UniqueName: \"kubernetes.io/projected/8c2c4d71-1c1e-4627-8e29-c0ffd9862e25-kube-api-access-5qbrn\") pod \"nova-api-0\" (UID: \"8c2c4d71-1c1e-4627-8e29-c0ffd9862e25\") " pod="openstack/nova-api-0" Oct 01 15:21:47 crc kubenswrapper[4869]: I1001 15:21:47.077936 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/34744dc3-77cb-4347-b1ad-fbce05019042-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"34744dc3-77cb-4347-b1ad-fbce05019042\") " pod="openstack/nova-scheduler-0" Oct 01 15:21:47 crc kubenswrapper[4869]: I1001 15:21:47.077974 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x9klt\" (UniqueName: \"kubernetes.io/projected/34744dc3-77cb-4347-b1ad-fbce05019042-kube-api-access-x9klt\") pod \"nova-scheduler-0\" (UID: \"34744dc3-77cb-4347-b1ad-fbce05019042\") " pod="openstack/nova-scheduler-0" Oct 01 15:21:47 crc kubenswrapper[4869]: I1001 15:21:47.077989 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/96494ec1-01a0-40fc-96f9-d62fbcfaa5fe-logs\") pod \"nova-metadata-0\" (UID: \"96494ec1-01a0-40fc-96f9-d62fbcfaa5fe\") " pod="openstack/nova-metadata-0" Oct 01 15:21:47 crc kubenswrapper[4869]: I1001 15:21:47.078032 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/96494ec1-01a0-40fc-96f9-d62fbcfaa5fe-config-data\") pod \"nova-metadata-0\" (UID: \"96494ec1-01a0-40fc-96f9-d62fbcfaa5fe\") " pod="openstack/nova-metadata-0" Oct 01 15:21:47 crc kubenswrapper[4869]: I1001 15:21:47.078822 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8c2c4d71-1c1e-4627-8e29-c0ffd9862e25-logs\") pod \"nova-api-0\" (UID: \"8c2c4d71-1c1e-4627-8e29-c0ffd9862e25\") " pod="openstack/nova-api-0" Oct 01 15:21:47 crc kubenswrapper[4869]: I1001 15:21:47.084680 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c2c4d71-1c1e-4627-8e29-c0ffd9862e25-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"8c2c4d71-1c1e-4627-8e29-c0ffd9862e25\") " pod="openstack/nova-api-0" Oct 01 15:21:47 crc kubenswrapper[4869]: I1001 15:21:47.086716 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8c2c4d71-1c1e-4627-8e29-c0ffd9862e25-config-data\") pod \"nova-api-0\" (UID: \"8c2c4d71-1c1e-4627-8e29-c0ffd9862e25\") " pod="openstack/nova-api-0" Oct 01 15:21:47 crc kubenswrapper[4869]: I1001 15:21:47.088390 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/34744dc3-77cb-4347-b1ad-fbce05019042-config-data\") pod \"nova-scheduler-0\" (UID: \"34744dc3-77cb-4347-b1ad-fbce05019042\") " pod="openstack/nova-scheduler-0" Oct 01 15:21:47 crc kubenswrapper[4869]: I1001 15:21:47.089155 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/34744dc3-77cb-4347-b1ad-fbce05019042-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"34744dc3-77cb-4347-b1ad-fbce05019042\") " pod="openstack/nova-scheduler-0" Oct 01 15:21:47 crc kubenswrapper[4869]: I1001 15:21:47.116478 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5qbrn\" (UniqueName: \"kubernetes.io/projected/8c2c4d71-1c1e-4627-8e29-c0ffd9862e25-kube-api-access-5qbrn\") pod \"nova-api-0\" (UID: \"8c2c4d71-1c1e-4627-8e29-c0ffd9862e25\") " pod="openstack/nova-api-0" Oct 01 15:21:47 crc kubenswrapper[4869]: I1001 15:21:47.116533 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x9klt\" (UniqueName: \"kubernetes.io/projected/34744dc3-77cb-4347-b1ad-fbce05019042-kube-api-access-x9klt\") pod \"nova-scheduler-0\" (UID: \"34744dc3-77cb-4347-b1ad-fbce05019042\") " pod="openstack/nova-scheduler-0" Oct 01 15:21:47 crc kubenswrapper[4869]: I1001 15:21:47.180404 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/54903b9a-2e49-43f1-9989-ff8d13276fe7-config\") pod \"dnsmasq-dns-7c57d6658c-9jnss\" (UID: \"54903b9a-2e49-43f1-9989-ff8d13276fe7\") " pod="openstack/dnsmasq-dns-7c57d6658c-9jnss" Oct 01 15:21:47 crc kubenswrapper[4869]: I1001 15:21:47.180753 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"kube-api-access-mdbj4\" (UniqueName: \"kubernetes.io/projected/54903b9a-2e49-43f1-9989-ff8d13276fe7-kube-api-access-mdbj4\") pod \"dnsmasq-dns-7c57d6658c-9jnss\" (UID: \"54903b9a-2e49-43f1-9989-ff8d13276fe7\") " pod="openstack/dnsmasq-dns-7c57d6658c-9jnss" Oct 01 15:21:47 crc kubenswrapper[4869]: I1001 15:21:47.180977 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/96494ec1-01a0-40fc-96f9-d62fbcfaa5fe-logs\") pod \"nova-metadata-0\" (UID: \"96494ec1-01a0-40fc-96f9-d62fbcfaa5fe\") " pod="openstack/nova-metadata-0" Oct 01 15:21:47 crc kubenswrapper[4869]: I1001 15:21:47.181078 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/54903b9a-2e49-43f1-9989-ff8d13276fe7-ovsdbserver-nb\") pod \"dnsmasq-dns-7c57d6658c-9jnss\" (UID: \"54903b9a-2e49-43f1-9989-ff8d13276fe7\") " pod="openstack/dnsmasq-dns-7c57d6658c-9jnss" Oct 01 15:21:47 crc kubenswrapper[4869]: I1001 15:21:47.181197 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/96494ec1-01a0-40fc-96f9-d62fbcfaa5fe-config-data\") pod \"nova-metadata-0\" (UID: \"96494ec1-01a0-40fc-96f9-d62fbcfaa5fe\") " pod="openstack/nova-metadata-0" Oct 01 15:21:47 crc kubenswrapper[4869]: I1001 15:21:47.182224 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/54903b9a-2e49-43f1-9989-ff8d13276fe7-dns-svc\") pod \"dnsmasq-dns-7c57d6658c-9jnss\" (UID: \"54903b9a-2e49-43f1-9989-ff8d13276fe7\") " pod="openstack/dnsmasq-dns-7c57d6658c-9jnss" Oct 01 15:21:47 crc kubenswrapper[4869]: I1001 15:21:47.182414 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96494ec1-01a0-40fc-96f9-d62fbcfaa5fe-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"96494ec1-01a0-40fc-96f9-d62fbcfaa5fe\") " pod="openstack/nova-metadata-0" Oct 01 15:21:47 crc kubenswrapper[4869]: I1001 15:21:47.183066 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jvjhv\" (UniqueName: \"kubernetes.io/projected/96494ec1-01a0-40fc-96f9-d62fbcfaa5fe-kube-api-access-jvjhv\") pod \"nova-metadata-0\" (UID: \"96494ec1-01a0-40fc-96f9-d62fbcfaa5fe\") " pod="openstack/nova-metadata-0" Oct 01 15:21:47 crc kubenswrapper[4869]: I1001 15:21:47.183192 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/54903b9a-2e49-43f1-9989-ff8d13276fe7-ovsdbserver-sb\") pod \"dnsmasq-dns-7c57d6658c-9jnss\" (UID: \"54903b9a-2e49-43f1-9989-ff8d13276fe7\") " pod="openstack/dnsmasq-dns-7c57d6658c-9jnss" Oct 01 15:21:47 crc kubenswrapper[4869]: I1001 15:21:47.183015 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/96494ec1-01a0-40fc-96f9-d62fbcfaa5fe-logs\") pod \"nova-metadata-0\" (UID: \"96494ec1-01a0-40fc-96f9-d62fbcfaa5fe\") " pod="openstack/nova-metadata-0" Oct 01 15:21:47 crc kubenswrapper[4869]: I1001 15:21:47.186628 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/96494ec1-01a0-40fc-96f9-d62fbcfaa5fe-config-data\") pod \"nova-metadata-0\" (UID: 
\"96494ec1-01a0-40fc-96f9-d62fbcfaa5fe\") " pod="openstack/nova-metadata-0" Oct 01 15:21:47 crc kubenswrapper[4869]: I1001 15:21:47.189287 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96494ec1-01a0-40fc-96f9-d62fbcfaa5fe-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"96494ec1-01a0-40fc-96f9-d62fbcfaa5fe\") " pod="openstack/nova-metadata-0" Oct 01 15:21:47 crc kubenswrapper[4869]: I1001 15:21:47.196771 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 01 15:21:47 crc kubenswrapper[4869]: I1001 15:21:47.198022 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jvjhv\" (UniqueName: \"kubernetes.io/projected/96494ec1-01a0-40fc-96f9-d62fbcfaa5fe-kube-api-access-jvjhv\") pod \"nova-metadata-0\" (UID: \"96494ec1-01a0-40fc-96f9-d62fbcfaa5fe\") " pod="openstack/nova-metadata-0" Oct 01 15:21:47 crc kubenswrapper[4869]: I1001 15:21:47.284363 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mdbj4\" (UniqueName: \"kubernetes.io/projected/54903b9a-2e49-43f1-9989-ff8d13276fe7-kube-api-access-mdbj4\") pod \"dnsmasq-dns-7c57d6658c-9jnss\" (UID: \"54903b9a-2e49-43f1-9989-ff8d13276fe7\") " pod="openstack/dnsmasq-dns-7c57d6658c-9jnss" Oct 01 15:21:47 crc kubenswrapper[4869]: I1001 15:21:47.284420 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/54903b9a-2e49-43f1-9989-ff8d13276fe7-ovsdbserver-nb\") pod \"dnsmasq-dns-7c57d6658c-9jnss\" (UID: \"54903b9a-2e49-43f1-9989-ff8d13276fe7\") " pod="openstack/dnsmasq-dns-7c57d6658c-9jnss" Oct 01 15:21:47 crc kubenswrapper[4869]: I1001 15:21:47.284488 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/54903b9a-2e49-43f1-9989-ff8d13276fe7-dns-svc\") pod \"dnsmasq-dns-7c57d6658c-9jnss\" (UID: \"54903b9a-2e49-43f1-9989-ff8d13276fe7\") " pod="openstack/dnsmasq-dns-7c57d6658c-9jnss" Oct 01 15:21:47 crc kubenswrapper[4869]: I1001 15:21:47.284530 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/54903b9a-2e49-43f1-9989-ff8d13276fe7-ovsdbserver-sb\") pod \"dnsmasq-dns-7c57d6658c-9jnss\" (UID: \"54903b9a-2e49-43f1-9989-ff8d13276fe7\") " pod="openstack/dnsmasq-dns-7c57d6658c-9jnss" Oct 01 15:21:47 crc kubenswrapper[4869]: I1001 15:21:47.284565 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/54903b9a-2e49-43f1-9989-ff8d13276fe7-config\") pod \"dnsmasq-dns-7c57d6658c-9jnss\" (UID: \"54903b9a-2e49-43f1-9989-ff8d13276fe7\") " pod="openstack/dnsmasq-dns-7c57d6658c-9jnss" Oct 01 15:21:47 crc kubenswrapper[4869]: I1001 15:21:47.285402 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/54903b9a-2e49-43f1-9989-ff8d13276fe7-config\") pod \"dnsmasq-dns-7c57d6658c-9jnss\" (UID: \"54903b9a-2e49-43f1-9989-ff8d13276fe7\") " pod="openstack/dnsmasq-dns-7c57d6658c-9jnss" Oct 01 15:21:47 crc kubenswrapper[4869]: I1001 15:21:47.285459 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/54903b9a-2e49-43f1-9989-ff8d13276fe7-dns-svc\") pod \"dnsmasq-dns-7c57d6658c-9jnss\" (UID: 
\"54903b9a-2e49-43f1-9989-ff8d13276fe7\") " pod="openstack/dnsmasq-dns-7c57d6658c-9jnss" Oct 01 15:21:47 crc kubenswrapper[4869]: I1001 15:21:47.286061 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/54903b9a-2e49-43f1-9989-ff8d13276fe7-ovsdbserver-nb\") pod \"dnsmasq-dns-7c57d6658c-9jnss\" (UID: \"54903b9a-2e49-43f1-9989-ff8d13276fe7\") " pod="openstack/dnsmasq-dns-7c57d6658c-9jnss" Oct 01 15:21:47 crc kubenswrapper[4869]: I1001 15:21:47.286367 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/54903b9a-2e49-43f1-9989-ff8d13276fe7-ovsdbserver-sb\") pod \"dnsmasq-dns-7c57d6658c-9jnss\" (UID: \"54903b9a-2e49-43f1-9989-ff8d13276fe7\") " pod="openstack/dnsmasq-dns-7c57d6658c-9jnss" Oct 01 15:21:47 crc kubenswrapper[4869]: I1001 15:21:47.304426 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mdbj4\" (UniqueName: \"kubernetes.io/projected/54903b9a-2e49-43f1-9989-ff8d13276fe7-kube-api-access-mdbj4\") pod \"dnsmasq-dns-7c57d6658c-9jnss\" (UID: \"54903b9a-2e49-43f1-9989-ff8d13276fe7\") " pod="openstack/dnsmasq-dns-7c57d6658c-9jnss" Oct 01 15:21:47 crc kubenswrapper[4869]: I1001 15:21:47.356566 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 01 15:21:47 crc kubenswrapper[4869]: I1001 15:21:47.378836 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 01 15:21:47 crc kubenswrapper[4869]: I1001 15:21:47.413516 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7c57d6658c-9jnss" Oct 01 15:21:47 crc kubenswrapper[4869]: I1001 15:21:47.440356 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-g8tmw"] Oct 01 15:21:47 crc kubenswrapper[4869]: I1001 15:21:47.523021 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-fmgks"] Oct 01 15:21:47 crc kubenswrapper[4869]: I1001 15:21:47.524850 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-fmgks" Oct 01 15:21:47 crc kubenswrapper[4869]: I1001 15:21:47.528095 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts" Oct 01 15:21:47 crc kubenswrapper[4869]: I1001 15:21:47.528616 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Oct 01 15:21:47 crc kubenswrapper[4869]: W1001 15:21:47.542361 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5f6cc53d_ee0e_480c_9a9c_47085a16bf8e.slice/crio-31f1755565263eaa430b3fc420bff95259bfc07b2f4c63b1fc06a81b226a7a23 WatchSource:0}: Error finding container 31f1755565263eaa430b3fc420bff95259bfc07b2f4c63b1fc06a81b226a7a23: Status 404 returned error can't find the container with id 31f1755565263eaa430b3fc420bff95259bfc07b2f4c63b1fc06a81b226a7a23 Oct 01 15:21:47 crc kubenswrapper[4869]: I1001 15:21:47.548384 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 01 15:21:47 crc kubenswrapper[4869]: I1001 15:21:47.569923 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-fmgks"] Oct 01 15:21:47 crc kubenswrapper[4869]: I1001 15:21:47.589309 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xqfsv\" (UniqueName: \"kubernetes.io/projected/5d724a70-eb91-4679-9f86-c3d1e874bdc6-kube-api-access-xqfsv\") pod \"nova-cell1-conductor-db-sync-fmgks\" (UID: \"5d724a70-eb91-4679-9f86-c3d1e874bdc6\") " pod="openstack/nova-cell1-conductor-db-sync-fmgks" Oct 01 15:21:47 crc kubenswrapper[4869]: I1001 15:21:47.589372 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5d724a70-eb91-4679-9f86-c3d1e874bdc6-config-data\") pod \"nova-cell1-conductor-db-sync-fmgks\" (UID: \"5d724a70-eb91-4679-9f86-c3d1e874bdc6\") " pod="openstack/nova-cell1-conductor-db-sync-fmgks" Oct 01 15:21:47 crc kubenswrapper[4869]: I1001 15:21:47.589471 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d724a70-eb91-4679-9f86-c3d1e874bdc6-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-fmgks\" (UID: \"5d724a70-eb91-4679-9f86-c3d1e874bdc6\") " pod="openstack/nova-cell1-conductor-db-sync-fmgks" Oct 01 15:21:47 crc kubenswrapper[4869]: I1001 15:21:47.589499 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5d724a70-eb91-4679-9f86-c3d1e874bdc6-scripts\") pod \"nova-cell1-conductor-db-sync-fmgks\" (UID: \"5d724a70-eb91-4679-9f86-c3d1e874bdc6\") " pod="openstack/nova-cell1-conductor-db-sync-fmgks" Oct 01 15:21:47 crc kubenswrapper[4869]: I1001 15:21:47.607688 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-g8tmw" event={"ID":"d7bd9313-7a67-41cd-9ab7-efd58f5ab44f","Type":"ContainerStarted","Data":"905034e920487c6cfe11916cdb75a7101726c1de81006cc8f31de514c7d67923"} Oct 01 15:21:47 crc kubenswrapper[4869]: I1001 15:21:47.608386 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 01 15:21:47 crc kubenswrapper[4869]: I1001 15:21:47.609470 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/nova-cell1-novncproxy-0" event={"ID":"5f6cc53d-ee0e-480c-9a9c-47085a16bf8e","Type":"ContainerStarted","Data":"31f1755565263eaa430b3fc420bff95259bfc07b2f4c63b1fc06a81b226a7a23"} Oct 01 15:21:47 crc kubenswrapper[4869]: I1001 15:21:47.691466 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d724a70-eb91-4679-9f86-c3d1e874bdc6-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-fmgks\" (UID: \"5d724a70-eb91-4679-9f86-c3d1e874bdc6\") " pod="openstack/nova-cell1-conductor-db-sync-fmgks" Oct 01 15:21:47 crc kubenswrapper[4869]: I1001 15:21:47.692939 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5d724a70-eb91-4679-9f86-c3d1e874bdc6-scripts\") pod \"nova-cell1-conductor-db-sync-fmgks\" (UID: \"5d724a70-eb91-4679-9f86-c3d1e874bdc6\") " pod="openstack/nova-cell1-conductor-db-sync-fmgks" Oct 01 15:21:47 crc kubenswrapper[4869]: I1001 15:21:47.694561 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xqfsv\" (UniqueName: \"kubernetes.io/projected/5d724a70-eb91-4679-9f86-c3d1e874bdc6-kube-api-access-xqfsv\") pod \"nova-cell1-conductor-db-sync-fmgks\" (UID: \"5d724a70-eb91-4679-9f86-c3d1e874bdc6\") " pod="openstack/nova-cell1-conductor-db-sync-fmgks" Oct 01 15:21:47 crc kubenswrapper[4869]: I1001 15:21:47.694644 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5d724a70-eb91-4679-9f86-c3d1e874bdc6-config-data\") pod \"nova-cell1-conductor-db-sync-fmgks\" (UID: \"5d724a70-eb91-4679-9f86-c3d1e874bdc6\") " pod="openstack/nova-cell1-conductor-db-sync-fmgks" Oct 01 15:21:47 crc kubenswrapper[4869]: I1001 15:21:47.696904 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d724a70-eb91-4679-9f86-c3d1e874bdc6-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-fmgks\" (UID: \"5d724a70-eb91-4679-9f86-c3d1e874bdc6\") " pod="openstack/nova-cell1-conductor-db-sync-fmgks" Oct 01 15:21:47 crc kubenswrapper[4869]: I1001 15:21:47.698894 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5d724a70-eb91-4679-9f86-c3d1e874bdc6-scripts\") pod \"nova-cell1-conductor-db-sync-fmgks\" (UID: \"5d724a70-eb91-4679-9f86-c3d1e874bdc6\") " pod="openstack/nova-cell1-conductor-db-sync-fmgks" Oct 01 15:21:47 crc kubenswrapper[4869]: I1001 15:21:47.701314 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5d724a70-eb91-4679-9f86-c3d1e874bdc6-config-data\") pod \"nova-cell1-conductor-db-sync-fmgks\" (UID: \"5d724a70-eb91-4679-9f86-c3d1e874bdc6\") " pod="openstack/nova-cell1-conductor-db-sync-fmgks" Oct 01 15:21:47 crc kubenswrapper[4869]: I1001 15:21:47.709937 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xqfsv\" (UniqueName: \"kubernetes.io/projected/5d724a70-eb91-4679-9f86-c3d1e874bdc6-kube-api-access-xqfsv\") pod \"nova-cell1-conductor-db-sync-fmgks\" (UID: \"5d724a70-eb91-4679-9f86-c3d1e874bdc6\") " pod="openstack/nova-cell1-conductor-db-sync-fmgks" Oct 01 15:21:47 crc kubenswrapper[4869]: I1001 15:21:47.851791 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-fmgks" Oct 01 15:21:47 crc kubenswrapper[4869]: I1001 15:21:47.857437 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 01 15:21:47 crc kubenswrapper[4869]: I1001 15:21:47.949084 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 01 15:21:47 crc kubenswrapper[4869]: W1001 15:21:47.959447 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod96494ec1_01a0_40fc_96f9_d62fbcfaa5fe.slice/crio-3e5a36ac380e79e2906395ca838a900e391d1931f5d52b3bb122417b4276f1f8 WatchSource:0}: Error finding container 3e5a36ac380e79e2906395ca838a900e391d1931f5d52b3bb122417b4276f1f8: Status 404 returned error can't find the container with id 3e5a36ac380e79e2906395ca838a900e391d1931f5d52b3bb122417b4276f1f8 Oct 01 15:21:48 crc kubenswrapper[4869]: I1001 15:21:48.023941 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7c57d6658c-9jnss"] Oct 01 15:21:48 crc kubenswrapper[4869]: I1001 15:21:48.326853 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-fmgks"] Oct 01 15:21:48 crc kubenswrapper[4869]: W1001 15:21:48.338424 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5d724a70_eb91_4679_9f86_c3d1e874bdc6.slice/crio-3c6c2b30525e7cd893424c81f57ca55eb47e9c903d88a874e04c8530752e5862 WatchSource:0}: Error finding container 3c6c2b30525e7cd893424c81f57ca55eb47e9c903d88a874e04c8530752e5862: Status 404 returned error can't find the container with id 3c6c2b30525e7cd893424c81f57ca55eb47e9c903d88a874e04c8530752e5862 Oct 01 15:21:48 crc kubenswrapper[4869]: I1001 15:21:48.640508 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-g8tmw" event={"ID":"d7bd9313-7a67-41cd-9ab7-efd58f5ab44f","Type":"ContainerStarted","Data":"77089dd930d91b9a4efd8ec345e1ee177333454e634521e014a73eb7358a3614"} Oct 01 15:21:48 crc kubenswrapper[4869]: I1001 15:21:48.642357 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"34744dc3-77cb-4347-b1ad-fbce05019042","Type":"ContainerStarted","Data":"fd8997f1b8bbbcc09318bea8206733b23ab418dcec564eb3ab71a9c794c6458c"} Oct 01 15:21:48 crc kubenswrapper[4869]: I1001 15:21:48.643590 4869 generic.go:334] "Generic (PLEG): container finished" podID="54903b9a-2e49-43f1-9989-ff8d13276fe7" containerID="f812380e5b3951d377839bd2e1d6958419ac33f083a8c9278239abf409f5ee74" exitCode=0 Oct 01 15:21:48 crc kubenswrapper[4869]: I1001 15:21:48.643622 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7c57d6658c-9jnss" event={"ID":"54903b9a-2e49-43f1-9989-ff8d13276fe7","Type":"ContainerDied","Data":"f812380e5b3951d377839bd2e1d6958419ac33f083a8c9278239abf409f5ee74"} Oct 01 15:21:48 crc kubenswrapper[4869]: I1001 15:21:48.643637 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7c57d6658c-9jnss" event={"ID":"54903b9a-2e49-43f1-9989-ff8d13276fe7","Type":"ContainerStarted","Data":"667cae2c7a132c2dd606b2ad7f038ddc4bbbcfc3cb8fc8e3cfebf758dedcb473"} Oct 01 15:21:48 crc kubenswrapper[4869]: I1001 15:21:48.648357 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-fmgks" 
event={"ID":"5d724a70-eb91-4679-9f86-c3d1e874bdc6","Type":"ContainerStarted","Data":"3c6c2b30525e7cd893424c81f57ca55eb47e9c903d88a874e04c8530752e5862"} Oct 01 15:21:48 crc kubenswrapper[4869]: I1001 15:21:48.664140 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"96494ec1-01a0-40fc-96f9-d62fbcfaa5fe","Type":"ContainerStarted","Data":"3e5a36ac380e79e2906395ca838a900e391d1931f5d52b3bb122417b4276f1f8"} Oct 01 15:21:48 crc kubenswrapper[4869]: I1001 15:21:48.665774 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"8c2c4d71-1c1e-4627-8e29-c0ffd9862e25","Type":"ContainerStarted","Data":"5d2a02c9fe1fe4819802662a50e09100606694dd9dc1b3080cef358edc560066"} Oct 01 15:21:48 crc kubenswrapper[4869]: I1001 15:21:48.680802 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-g8tmw" podStartSLOduration=2.680780047 podStartE2EDuration="2.680780047s" podCreationTimestamp="2025-10-01 15:21:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:21:48.672826996 +0000 UTC m=+1017.819670112" watchObservedRunningTime="2025-10-01 15:21:48.680780047 +0000 UTC m=+1017.827623173" Oct 01 15:21:49 crc kubenswrapper[4869]: I1001 15:21:49.697689 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7c57d6658c-9jnss" event={"ID":"54903b9a-2e49-43f1-9989-ff8d13276fe7","Type":"ContainerStarted","Data":"7768ef1facd2bcbde779e78384d383b9b7ae89a72a4b666417b8d9b57ac19d7b"} Oct 01 15:21:49 crc kubenswrapper[4869]: I1001 15:21:49.698214 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7c57d6658c-9jnss" Oct 01 15:21:49 crc kubenswrapper[4869]: I1001 15:21:49.700775 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-fmgks" event={"ID":"5d724a70-eb91-4679-9f86-c3d1e874bdc6","Type":"ContainerStarted","Data":"cdee20a8079220f9ce6fb5e85101ee74f6da54f93cbcf0a2371e0ece2638efb8"} Oct 01 15:21:49 crc kubenswrapper[4869]: I1001 15:21:49.731512 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7c57d6658c-9jnss" podStartSLOduration=3.731493264 podStartE2EDuration="3.731493264s" podCreationTimestamp="2025-10-01 15:21:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:21:49.722799834 +0000 UTC m=+1018.869642960" watchObservedRunningTime="2025-10-01 15:21:49.731493264 +0000 UTC m=+1018.878336380" Oct 01 15:21:49 crc kubenswrapper[4869]: I1001 15:21:49.736726 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-fmgks" podStartSLOduration=2.736710955 podStartE2EDuration="2.736710955s" podCreationTimestamp="2025-10-01 15:21:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:21:49.735731771 +0000 UTC m=+1018.882574887" watchObservedRunningTime="2025-10-01 15:21:49.736710955 +0000 UTC m=+1018.883554071" Oct 01 15:21:50 crc kubenswrapper[4869]: I1001 15:21:50.683354 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 01 15:21:50 crc kubenswrapper[4869]: I1001 15:21:50.699202 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/nova-cell1-novncproxy-0"] Oct 01 15:21:51 crc kubenswrapper[4869]: I1001 15:21:51.719052 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"34744dc3-77cb-4347-b1ad-fbce05019042","Type":"ContainerStarted","Data":"a2e13a113629984ae09ec5f542825035d53a85440900d718364efe8dda104458"} Oct 01 15:21:51 crc kubenswrapper[4869]: I1001 15:21:51.723306 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"96494ec1-01a0-40fc-96f9-d62fbcfaa5fe","Type":"ContainerStarted","Data":"4a48b2d88313ed0f37d96a68ac867056399a6a868b9f4e6bccc135fc94546dad"} Oct 01 15:21:51 crc kubenswrapper[4869]: I1001 15:21:51.723357 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"96494ec1-01a0-40fc-96f9-d62fbcfaa5fe","Type":"ContainerStarted","Data":"cdbac306216eeaa0ec899a842fb7288264ad958b6361178fdb118bb69c7b0e47"} Oct 01 15:21:51 crc kubenswrapper[4869]: I1001 15:21:51.723519 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="96494ec1-01a0-40fc-96f9-d62fbcfaa5fe" containerName="nova-metadata-log" containerID="cri-o://cdbac306216eeaa0ec899a842fb7288264ad958b6361178fdb118bb69c7b0e47" gracePeriod=30 Oct 01 15:21:51 crc kubenswrapper[4869]: I1001 15:21:51.723651 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="96494ec1-01a0-40fc-96f9-d62fbcfaa5fe" containerName="nova-metadata-metadata" containerID="cri-o://4a48b2d88313ed0f37d96a68ac867056399a6a868b9f4e6bccc135fc94546dad" gracePeriod=30 Oct 01 15:21:51 crc kubenswrapper[4869]: I1001 15:21:51.729825 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"8c2c4d71-1c1e-4627-8e29-c0ffd9862e25","Type":"ContainerStarted","Data":"105e1c4e601b7ff9c6cac949694c7065447a874eab57473debf0e76263a8d8dd"} Oct 01 15:21:51 crc kubenswrapper[4869]: I1001 15:21:51.729878 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"8c2c4d71-1c1e-4627-8e29-c0ffd9862e25","Type":"ContainerStarted","Data":"6708f67a419d26f377783bf3934dd3f13f9047abdebda9ca227f52ec8a2d8ab6"} Oct 01 15:21:51 crc kubenswrapper[4869]: I1001 15:21:51.734524 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"5f6cc53d-ee0e-480c-9a9c-47085a16bf8e","Type":"ContainerStarted","Data":"28c6e575f21e30762a2b996e6e4a30351fafe38f6788a06901e7ddcb74ed8755"} Oct 01 15:21:51 crc kubenswrapper[4869]: I1001 15:21:51.734706 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="5f6cc53d-ee0e-480c-9a9c-47085a16bf8e" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://28c6e575f21e30762a2b996e6e4a30351fafe38f6788a06901e7ddcb74ed8755" gracePeriod=30 Oct 01 15:21:51 crc kubenswrapper[4869]: I1001 15:21:51.740053 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.8867248979999998 podStartE2EDuration="5.740034284s" podCreationTimestamp="2025-10-01 15:21:46 +0000 UTC" firstStartedPulling="2025-10-01 15:21:47.892404724 +0000 UTC m=+1017.039247840" lastFinishedPulling="2025-10-01 15:21:50.74571411 +0000 UTC m=+1019.892557226" observedRunningTime="2025-10-01 15:21:51.732061633 +0000 UTC m=+1020.878904769" watchObservedRunningTime="2025-10-01 15:21:51.740034284 +0000 UTC 
m=+1020.886877400" Oct 01 15:21:51 crc kubenswrapper[4869]: I1001 15:21:51.774774 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.638897169 podStartE2EDuration="5.774754091s" podCreationTimestamp="2025-10-01 15:21:46 +0000 UTC" firstStartedPulling="2025-10-01 15:21:47.608944525 +0000 UTC m=+1016.755787641" lastFinishedPulling="2025-10-01 15:21:50.744801447 +0000 UTC m=+1019.891644563" observedRunningTime="2025-10-01 15:21:51.765924538 +0000 UTC m=+1020.912767654" watchObservedRunningTime="2025-10-01 15:21:51.774754091 +0000 UTC m=+1020.921597217" Oct 01 15:21:51 crc kubenswrapper[4869]: I1001 15:21:51.788773 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=3.002742618 podStartE2EDuration="5.788755634s" podCreationTimestamp="2025-10-01 15:21:46 +0000 UTC" firstStartedPulling="2025-10-01 15:21:47.962672739 +0000 UTC m=+1017.109515855" lastFinishedPulling="2025-10-01 15:21:50.748685755 +0000 UTC m=+1019.895528871" observedRunningTime="2025-10-01 15:21:51.783929173 +0000 UTC m=+1020.930772289" watchObservedRunningTime="2025-10-01 15:21:51.788755634 +0000 UTC m=+1020.935598740" Oct 01 15:21:51 crc kubenswrapper[4869]: I1001 15:21:51.815817 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.618851392 podStartE2EDuration="5.815795507s" podCreationTimestamp="2025-10-01 15:21:46 +0000 UTC" firstStartedPulling="2025-10-01 15:21:47.546454967 +0000 UTC m=+1016.693298083" lastFinishedPulling="2025-10-01 15:21:50.743399082 +0000 UTC m=+1019.890242198" observedRunningTime="2025-10-01 15:21:51.803758013 +0000 UTC m=+1020.950601159" watchObservedRunningTime="2025-10-01 15:21:51.815795507 +0000 UTC m=+1020.962638623" Oct 01 15:21:52 crc kubenswrapper[4869]: I1001 15:21:52.060206 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Oct 01 15:21:52 crc kubenswrapper[4869]: I1001 15:21:52.332315 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 01 15:21:52 crc kubenswrapper[4869]: I1001 15:21:52.360970 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Oct 01 15:21:52 crc kubenswrapper[4869]: I1001 15:21:52.383777 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/96494ec1-01a0-40fc-96f9-d62fbcfaa5fe-config-data\") pod \"96494ec1-01a0-40fc-96f9-d62fbcfaa5fe\" (UID: \"96494ec1-01a0-40fc-96f9-d62fbcfaa5fe\") " Oct 01 15:21:52 crc kubenswrapper[4869]: I1001 15:21:52.384054 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/96494ec1-01a0-40fc-96f9-d62fbcfaa5fe-logs\") pod \"96494ec1-01a0-40fc-96f9-d62fbcfaa5fe\" (UID: \"96494ec1-01a0-40fc-96f9-d62fbcfaa5fe\") " Oct 01 15:21:52 crc kubenswrapper[4869]: I1001 15:21:52.384755 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96494ec1-01a0-40fc-96f9-d62fbcfaa5fe-combined-ca-bundle\") pod \"96494ec1-01a0-40fc-96f9-d62fbcfaa5fe\" (UID: \"96494ec1-01a0-40fc-96f9-d62fbcfaa5fe\") " Oct 01 15:21:52 crc kubenswrapper[4869]: I1001 15:21:52.385155 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jvjhv\" (UniqueName: \"kubernetes.io/projected/96494ec1-01a0-40fc-96f9-d62fbcfaa5fe-kube-api-access-jvjhv\") pod \"96494ec1-01a0-40fc-96f9-d62fbcfaa5fe\" (UID: \"96494ec1-01a0-40fc-96f9-d62fbcfaa5fe\") " Oct 01 15:21:52 crc kubenswrapper[4869]: I1001 15:21:52.384691 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/96494ec1-01a0-40fc-96f9-d62fbcfaa5fe-logs" (OuterVolumeSpecName: "logs") pod "96494ec1-01a0-40fc-96f9-d62fbcfaa5fe" (UID: "96494ec1-01a0-40fc-96f9-d62fbcfaa5fe"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 15:21:52 crc kubenswrapper[4869]: I1001 15:21:52.386461 4869 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/96494ec1-01a0-40fc-96f9-d62fbcfaa5fe-logs\") on node \"crc\" DevicePath \"\"" Oct 01 15:21:52 crc kubenswrapper[4869]: I1001 15:21:52.389407 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96494ec1-01a0-40fc-96f9-d62fbcfaa5fe-kube-api-access-jvjhv" (OuterVolumeSpecName: "kube-api-access-jvjhv") pod "96494ec1-01a0-40fc-96f9-d62fbcfaa5fe" (UID: "96494ec1-01a0-40fc-96f9-d62fbcfaa5fe"). InnerVolumeSpecName "kube-api-access-jvjhv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:21:52 crc kubenswrapper[4869]: I1001 15:21:52.419394 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96494ec1-01a0-40fc-96f9-d62fbcfaa5fe-config-data" (OuterVolumeSpecName: "config-data") pod "96494ec1-01a0-40fc-96f9-d62fbcfaa5fe" (UID: "96494ec1-01a0-40fc-96f9-d62fbcfaa5fe"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:21:52 crc kubenswrapper[4869]: I1001 15:21:52.422511 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96494ec1-01a0-40fc-96f9-d62fbcfaa5fe-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "96494ec1-01a0-40fc-96f9-d62fbcfaa5fe" (UID: "96494ec1-01a0-40fc-96f9-d62fbcfaa5fe"). 
InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:21:52 crc kubenswrapper[4869]: I1001 15:21:52.489937 4869 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/96494ec1-01a0-40fc-96f9-d62fbcfaa5fe-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 15:21:52 crc kubenswrapper[4869]: I1001 15:21:52.490248 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96494ec1-01a0-40fc-96f9-d62fbcfaa5fe-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 15:21:52 crc kubenswrapper[4869]: I1001 15:21:52.490288 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jvjhv\" (UniqueName: \"kubernetes.io/projected/96494ec1-01a0-40fc-96f9-d62fbcfaa5fe-kube-api-access-jvjhv\") on node \"crc\" DevicePath \"\"" Oct 01 15:21:52 crc kubenswrapper[4869]: I1001 15:21:52.754366 4869 generic.go:334] "Generic (PLEG): container finished" podID="96494ec1-01a0-40fc-96f9-d62fbcfaa5fe" containerID="4a48b2d88313ed0f37d96a68ac867056399a6a868b9f4e6bccc135fc94546dad" exitCode=0 Oct 01 15:21:52 crc kubenswrapper[4869]: I1001 15:21:52.754400 4869 generic.go:334] "Generic (PLEG): container finished" podID="96494ec1-01a0-40fc-96f9-d62fbcfaa5fe" containerID="cdbac306216eeaa0ec899a842fb7288264ad958b6361178fdb118bb69c7b0e47" exitCode=143 Oct 01 15:21:52 crc kubenswrapper[4869]: I1001 15:21:52.755172 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 01 15:21:52 crc kubenswrapper[4869]: I1001 15:21:52.764399 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"96494ec1-01a0-40fc-96f9-d62fbcfaa5fe","Type":"ContainerDied","Data":"4a48b2d88313ed0f37d96a68ac867056399a6a868b9f4e6bccc135fc94546dad"} Oct 01 15:21:52 crc kubenswrapper[4869]: I1001 15:21:52.764445 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"96494ec1-01a0-40fc-96f9-d62fbcfaa5fe","Type":"ContainerDied","Data":"cdbac306216eeaa0ec899a842fb7288264ad958b6361178fdb118bb69c7b0e47"} Oct 01 15:21:52 crc kubenswrapper[4869]: I1001 15:21:52.764457 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"96494ec1-01a0-40fc-96f9-d62fbcfaa5fe","Type":"ContainerDied","Data":"3e5a36ac380e79e2906395ca838a900e391d1931f5d52b3bb122417b4276f1f8"} Oct 01 15:21:52 crc kubenswrapper[4869]: I1001 15:21:52.764474 4869 scope.go:117] "RemoveContainer" containerID="4a48b2d88313ed0f37d96a68ac867056399a6a868b9f4e6bccc135fc94546dad" Oct 01 15:21:52 crc kubenswrapper[4869]: I1001 15:21:52.860092 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 01 15:21:52 crc kubenswrapper[4869]: I1001 15:21:52.875853 4869 scope.go:117] "RemoveContainer" containerID="cdbac306216eeaa0ec899a842fb7288264ad958b6361178fdb118bb69c7b0e47" Oct 01 15:21:52 crc kubenswrapper[4869]: I1001 15:21:52.889044 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Oct 01 15:21:52 crc kubenswrapper[4869]: I1001 15:21:52.900959 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Oct 01 15:21:52 crc kubenswrapper[4869]: E1001 15:21:52.901546 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="96494ec1-01a0-40fc-96f9-d62fbcfaa5fe" containerName="nova-metadata-log" Oct 01 15:21:52 crc kubenswrapper[4869]: 
I1001 15:21:52.901585 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="96494ec1-01a0-40fc-96f9-d62fbcfaa5fe" containerName="nova-metadata-log" Oct 01 15:21:52 crc kubenswrapper[4869]: E1001 15:21:52.901635 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="96494ec1-01a0-40fc-96f9-d62fbcfaa5fe" containerName="nova-metadata-metadata" Oct 01 15:21:52 crc kubenswrapper[4869]: I1001 15:21:52.901643 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="96494ec1-01a0-40fc-96f9-d62fbcfaa5fe" containerName="nova-metadata-metadata" Oct 01 15:21:52 crc kubenswrapper[4869]: I1001 15:21:52.901970 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="96494ec1-01a0-40fc-96f9-d62fbcfaa5fe" containerName="nova-metadata-log" Oct 01 15:21:52 crc kubenswrapper[4869]: I1001 15:21:52.902021 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="96494ec1-01a0-40fc-96f9-d62fbcfaa5fe" containerName="nova-metadata-metadata" Oct 01 15:21:52 crc kubenswrapper[4869]: I1001 15:21:52.904059 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 01 15:21:52 crc kubenswrapper[4869]: I1001 15:21:52.912403 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Oct 01 15:21:52 crc kubenswrapper[4869]: I1001 15:21:52.912642 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Oct 01 15:21:52 crc kubenswrapper[4869]: I1001 15:21:52.917629 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 01 15:21:52 crc kubenswrapper[4869]: I1001 15:21:52.932656 4869 scope.go:117] "RemoveContainer" containerID="4a48b2d88313ed0f37d96a68ac867056399a6a868b9f4e6bccc135fc94546dad" Oct 01 15:21:52 crc kubenswrapper[4869]: E1001 15:21:52.933707 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4a48b2d88313ed0f37d96a68ac867056399a6a868b9f4e6bccc135fc94546dad\": container with ID starting with 4a48b2d88313ed0f37d96a68ac867056399a6a868b9f4e6bccc135fc94546dad not found: ID does not exist" containerID="4a48b2d88313ed0f37d96a68ac867056399a6a868b9f4e6bccc135fc94546dad" Oct 01 15:21:52 crc kubenswrapper[4869]: I1001 15:21:52.933739 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4a48b2d88313ed0f37d96a68ac867056399a6a868b9f4e6bccc135fc94546dad"} err="failed to get container status \"4a48b2d88313ed0f37d96a68ac867056399a6a868b9f4e6bccc135fc94546dad\": rpc error: code = NotFound desc = could not find container \"4a48b2d88313ed0f37d96a68ac867056399a6a868b9f4e6bccc135fc94546dad\": container with ID starting with 4a48b2d88313ed0f37d96a68ac867056399a6a868b9f4e6bccc135fc94546dad not found: ID does not exist" Oct 01 15:21:52 crc kubenswrapper[4869]: I1001 15:21:52.933763 4869 scope.go:117] "RemoveContainer" containerID="cdbac306216eeaa0ec899a842fb7288264ad958b6361178fdb118bb69c7b0e47" Oct 01 15:21:52 crc kubenswrapper[4869]: E1001 15:21:52.934155 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cdbac306216eeaa0ec899a842fb7288264ad958b6361178fdb118bb69c7b0e47\": container with ID starting with cdbac306216eeaa0ec899a842fb7288264ad958b6361178fdb118bb69c7b0e47 not found: ID does not exist" containerID="cdbac306216eeaa0ec899a842fb7288264ad958b6361178fdb118bb69c7b0e47" Oct 01 15:21:52 crc 
kubenswrapper[4869]: I1001 15:21:52.934209 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cdbac306216eeaa0ec899a842fb7288264ad958b6361178fdb118bb69c7b0e47"} err="failed to get container status \"cdbac306216eeaa0ec899a842fb7288264ad958b6361178fdb118bb69c7b0e47\": rpc error: code = NotFound desc = could not find container \"cdbac306216eeaa0ec899a842fb7288264ad958b6361178fdb118bb69c7b0e47\": container with ID starting with cdbac306216eeaa0ec899a842fb7288264ad958b6361178fdb118bb69c7b0e47 not found: ID does not exist" Oct 01 15:21:52 crc kubenswrapper[4869]: I1001 15:21:52.934242 4869 scope.go:117] "RemoveContainer" containerID="4a48b2d88313ed0f37d96a68ac867056399a6a868b9f4e6bccc135fc94546dad" Oct 01 15:21:52 crc kubenswrapper[4869]: I1001 15:21:52.934698 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4a48b2d88313ed0f37d96a68ac867056399a6a868b9f4e6bccc135fc94546dad"} err="failed to get container status \"4a48b2d88313ed0f37d96a68ac867056399a6a868b9f4e6bccc135fc94546dad\": rpc error: code = NotFound desc = could not find container \"4a48b2d88313ed0f37d96a68ac867056399a6a868b9f4e6bccc135fc94546dad\": container with ID starting with 4a48b2d88313ed0f37d96a68ac867056399a6a868b9f4e6bccc135fc94546dad not found: ID does not exist" Oct 01 15:21:52 crc kubenswrapper[4869]: I1001 15:21:52.934790 4869 scope.go:117] "RemoveContainer" containerID="cdbac306216eeaa0ec899a842fb7288264ad958b6361178fdb118bb69c7b0e47" Oct 01 15:21:52 crc kubenswrapper[4869]: I1001 15:21:52.935143 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cdbac306216eeaa0ec899a842fb7288264ad958b6361178fdb118bb69c7b0e47"} err="failed to get container status \"cdbac306216eeaa0ec899a842fb7288264ad958b6361178fdb118bb69c7b0e47\": rpc error: code = NotFound desc = could not find container \"cdbac306216eeaa0ec899a842fb7288264ad958b6361178fdb118bb69c7b0e47\": container with ID starting with cdbac306216eeaa0ec899a842fb7288264ad958b6361178fdb118bb69c7b0e47 not found: ID does not exist" Oct 01 15:21:53 crc kubenswrapper[4869]: I1001 15:21:53.015812 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/98a767a6-6018-452e-acc7-83c2c5ae1e68-logs\") pod \"nova-metadata-0\" (UID: \"98a767a6-6018-452e-acc7-83c2c5ae1e68\") " pod="openstack/nova-metadata-0" Oct 01 15:21:53 crc kubenswrapper[4869]: I1001 15:21:53.016253 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/98a767a6-6018-452e-acc7-83c2c5ae1e68-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"98a767a6-6018-452e-acc7-83c2c5ae1e68\") " pod="openstack/nova-metadata-0" Oct 01 15:21:53 crc kubenswrapper[4869]: I1001 15:21:53.016459 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/98a767a6-6018-452e-acc7-83c2c5ae1e68-config-data\") pod \"nova-metadata-0\" (UID: \"98a767a6-6018-452e-acc7-83c2c5ae1e68\") " pod="openstack/nova-metadata-0" Oct 01 15:21:53 crc kubenswrapper[4869]: I1001 15:21:53.017178 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-thvpg\" (UniqueName: \"kubernetes.io/projected/98a767a6-6018-452e-acc7-83c2c5ae1e68-kube-api-access-thvpg\") pod \"nova-metadata-0\" 
(UID: \"98a767a6-6018-452e-acc7-83c2c5ae1e68\") " pod="openstack/nova-metadata-0" Oct 01 15:21:53 crc kubenswrapper[4869]: I1001 15:21:53.017356 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/98a767a6-6018-452e-acc7-83c2c5ae1e68-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"98a767a6-6018-452e-acc7-83c2c5ae1e68\") " pod="openstack/nova-metadata-0" Oct 01 15:21:53 crc kubenswrapper[4869]: I1001 15:21:53.119451 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/98a767a6-6018-452e-acc7-83c2c5ae1e68-config-data\") pod \"nova-metadata-0\" (UID: \"98a767a6-6018-452e-acc7-83c2c5ae1e68\") " pod="openstack/nova-metadata-0" Oct 01 15:21:53 crc kubenswrapper[4869]: I1001 15:21:53.120600 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-thvpg\" (UniqueName: \"kubernetes.io/projected/98a767a6-6018-452e-acc7-83c2c5ae1e68-kube-api-access-thvpg\") pod \"nova-metadata-0\" (UID: \"98a767a6-6018-452e-acc7-83c2c5ae1e68\") " pod="openstack/nova-metadata-0" Oct 01 15:21:53 crc kubenswrapper[4869]: I1001 15:21:53.120917 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/98a767a6-6018-452e-acc7-83c2c5ae1e68-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"98a767a6-6018-452e-acc7-83c2c5ae1e68\") " pod="openstack/nova-metadata-0" Oct 01 15:21:53 crc kubenswrapper[4869]: I1001 15:21:53.121254 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/98a767a6-6018-452e-acc7-83c2c5ae1e68-logs\") pod \"nova-metadata-0\" (UID: \"98a767a6-6018-452e-acc7-83c2c5ae1e68\") " pod="openstack/nova-metadata-0" Oct 01 15:21:53 crc kubenswrapper[4869]: I1001 15:21:53.121531 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/98a767a6-6018-452e-acc7-83c2c5ae1e68-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"98a767a6-6018-452e-acc7-83c2c5ae1e68\") " pod="openstack/nova-metadata-0" Oct 01 15:21:53 crc kubenswrapper[4869]: I1001 15:21:53.121799 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/98a767a6-6018-452e-acc7-83c2c5ae1e68-logs\") pod \"nova-metadata-0\" (UID: \"98a767a6-6018-452e-acc7-83c2c5ae1e68\") " pod="openstack/nova-metadata-0" Oct 01 15:21:53 crc kubenswrapper[4869]: I1001 15:21:53.125161 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/98a767a6-6018-452e-acc7-83c2c5ae1e68-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"98a767a6-6018-452e-acc7-83c2c5ae1e68\") " pod="openstack/nova-metadata-0" Oct 01 15:21:53 crc kubenswrapper[4869]: I1001 15:21:53.125901 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/98a767a6-6018-452e-acc7-83c2c5ae1e68-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"98a767a6-6018-452e-acc7-83c2c5ae1e68\") " pod="openstack/nova-metadata-0" Oct 01 15:21:53 crc kubenswrapper[4869]: I1001 15:21:53.126139 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/98a767a6-6018-452e-acc7-83c2c5ae1e68-config-data\") pod \"nova-metadata-0\" (UID: \"98a767a6-6018-452e-acc7-83c2c5ae1e68\") " pod="openstack/nova-metadata-0" Oct 01 15:21:53 crc kubenswrapper[4869]: I1001 15:21:53.146347 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-thvpg\" (UniqueName: \"kubernetes.io/projected/98a767a6-6018-452e-acc7-83c2c5ae1e68-kube-api-access-thvpg\") pod \"nova-metadata-0\" (UID: \"98a767a6-6018-452e-acc7-83c2c5ae1e68\") " pod="openstack/nova-metadata-0" Oct 01 15:21:53 crc kubenswrapper[4869]: I1001 15:21:53.223840 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 01 15:21:53 crc kubenswrapper[4869]: I1001 15:21:53.592104 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96494ec1-01a0-40fc-96f9-d62fbcfaa5fe" path="/var/lib/kubelet/pods/96494ec1-01a0-40fc-96f9-d62fbcfaa5fe/volumes" Oct 01 15:21:53 crc kubenswrapper[4869]: I1001 15:21:53.689463 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 01 15:21:53 crc kubenswrapper[4869]: W1001 15:21:53.695972 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod98a767a6_6018_452e_acc7_83c2c5ae1e68.slice/crio-fccec572c063b2119d7a223de783ba9190ff2a543553da4e9bd8707cc60858f0 WatchSource:0}: Error finding container fccec572c063b2119d7a223de783ba9190ff2a543553da4e9bd8707cc60858f0: Status 404 returned error can't find the container with id fccec572c063b2119d7a223de783ba9190ff2a543553da4e9bd8707cc60858f0 Oct 01 15:21:53 crc kubenswrapper[4869]: I1001 15:21:53.706025 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Oct 01 15:21:53 crc kubenswrapper[4869]: I1001 15:21:53.769628 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"98a767a6-6018-452e-acc7-83c2c5ae1e68","Type":"ContainerStarted","Data":"fccec572c063b2119d7a223de783ba9190ff2a543553da4e9bd8707cc60858f0"} Oct 01 15:21:54 crc kubenswrapper[4869]: I1001 15:21:54.780438 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"98a767a6-6018-452e-acc7-83c2c5ae1e68","Type":"ContainerStarted","Data":"824d481e5db562318cf0b278f7dd15ab28f01bc7d530bc39e39accfe3a48f97c"} Oct 01 15:21:54 crc kubenswrapper[4869]: I1001 15:21:54.780809 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"98a767a6-6018-452e-acc7-83c2c5ae1e68","Type":"ContainerStarted","Data":"aa2dd2a25a5f0b83290e77156934deef68902d9c3efb435e56ba88c5e422ee33"} Oct 01 15:21:54 crc kubenswrapper[4869]: I1001 15:21:54.805848 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.805830736 podStartE2EDuration="2.805830736s" podCreationTimestamp="2025-10-01 15:21:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:21:54.803108458 +0000 UTC m=+1023.949951574" watchObservedRunningTime="2025-10-01 15:21:54.805830736 +0000 UTC m=+1023.952673872" Oct 01 15:21:55 crc kubenswrapper[4869]: I1001 15:21:55.791162 4869 generic.go:334] "Generic (PLEG): container finished" podID="5d724a70-eb91-4679-9f86-c3d1e874bdc6" containerID="cdee20a8079220f9ce6fb5e85101ee74f6da54f93cbcf0a2371e0ece2638efb8" exitCode=0 Oct 01 
15:21:55 crc kubenswrapper[4869]: I1001 15:21:55.791246 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-fmgks" event={"ID":"5d724a70-eb91-4679-9f86-c3d1e874bdc6","Type":"ContainerDied","Data":"cdee20a8079220f9ce6fb5e85101ee74f6da54f93cbcf0a2371e0ece2638efb8"} Oct 01 15:21:55 crc kubenswrapper[4869]: I1001 15:21:55.794229 4869 generic.go:334] "Generic (PLEG): container finished" podID="d7bd9313-7a67-41cd-9ab7-efd58f5ab44f" containerID="77089dd930d91b9a4efd8ec345e1ee177333454e634521e014a73eb7358a3614" exitCode=0 Oct 01 15:21:55 crc kubenswrapper[4869]: I1001 15:21:55.794397 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-g8tmw" event={"ID":"d7bd9313-7a67-41cd-9ab7-efd58f5ab44f","Type":"ContainerDied","Data":"77089dd930d91b9a4efd8ec345e1ee177333454e634521e014a73eb7358a3614"} Oct 01 15:21:56 crc kubenswrapper[4869]: I1001 15:21:56.524085 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 01 15:21:56 crc kubenswrapper[4869]: I1001 15:21:56.524600 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="589bdd72-e961-4bbd-bb04-bcff96363cab" containerName="kube-state-metrics" containerID="cri-o://796cb2602fbe212a6a0fff1c0b6dfa43ce47db0f54b471aa1a94dfc4b889263d" gracePeriod=30 Oct 01 15:21:56 crc kubenswrapper[4869]: I1001 15:21:56.813182 4869 generic.go:334] "Generic (PLEG): container finished" podID="589bdd72-e961-4bbd-bb04-bcff96363cab" containerID="796cb2602fbe212a6a0fff1c0b6dfa43ce47db0f54b471aa1a94dfc4b889263d" exitCode=2 Oct 01 15:21:56 crc kubenswrapper[4869]: I1001 15:21:56.813373 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"589bdd72-e961-4bbd-bb04-bcff96363cab","Type":"ContainerDied","Data":"796cb2602fbe212a6a0fff1c0b6dfa43ce47db0f54b471aa1a94dfc4b889263d"} Oct 01 15:21:57 crc kubenswrapper[4869]: I1001 15:21:57.007382 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 01 15:21:57 crc kubenswrapper[4869]: I1001 15:21:57.091114 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bbzt7\" (UniqueName: \"kubernetes.io/projected/589bdd72-e961-4bbd-bb04-bcff96363cab-kube-api-access-bbzt7\") pod \"589bdd72-e961-4bbd-bb04-bcff96363cab\" (UID: \"589bdd72-e961-4bbd-bb04-bcff96363cab\") " Oct 01 15:21:57 crc kubenswrapper[4869]: I1001 15:21:57.114751 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/589bdd72-e961-4bbd-bb04-bcff96363cab-kube-api-access-bbzt7" (OuterVolumeSpecName: "kube-api-access-bbzt7") pod "589bdd72-e961-4bbd-bb04-bcff96363cab" (UID: "589bdd72-e961-4bbd-bb04-bcff96363cab"). InnerVolumeSpecName "kube-api-access-bbzt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:21:57 crc kubenswrapper[4869]: I1001 15:21:57.187162 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-fmgks" Oct 01 15:21:57 crc kubenswrapper[4869]: I1001 15:21:57.193904 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d724a70-eb91-4679-9f86-c3d1e874bdc6-combined-ca-bundle\") pod \"5d724a70-eb91-4679-9f86-c3d1e874bdc6\" (UID: \"5d724a70-eb91-4679-9f86-c3d1e874bdc6\") " Oct 01 15:21:57 crc kubenswrapper[4869]: I1001 15:21:57.194173 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xqfsv\" (UniqueName: \"kubernetes.io/projected/5d724a70-eb91-4679-9f86-c3d1e874bdc6-kube-api-access-xqfsv\") pod \"5d724a70-eb91-4679-9f86-c3d1e874bdc6\" (UID: \"5d724a70-eb91-4679-9f86-c3d1e874bdc6\") " Oct 01 15:21:57 crc kubenswrapper[4869]: I1001 15:21:57.194241 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5d724a70-eb91-4679-9f86-c3d1e874bdc6-scripts\") pod \"5d724a70-eb91-4679-9f86-c3d1e874bdc6\" (UID: \"5d724a70-eb91-4679-9f86-c3d1e874bdc6\") " Oct 01 15:21:57 crc kubenswrapper[4869]: I1001 15:21:57.194283 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5d724a70-eb91-4679-9f86-c3d1e874bdc6-config-data\") pod \"5d724a70-eb91-4679-9f86-c3d1e874bdc6\" (UID: \"5d724a70-eb91-4679-9f86-c3d1e874bdc6\") " Oct 01 15:21:57 crc kubenswrapper[4869]: I1001 15:21:57.194692 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bbzt7\" (UniqueName: \"kubernetes.io/projected/589bdd72-e961-4bbd-bb04-bcff96363cab-kube-api-access-bbzt7\") on node \"crc\" DevicePath \"\"" Oct 01 15:21:57 crc kubenswrapper[4869]: I1001 15:21:57.199674 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 01 15:21:57 crc kubenswrapper[4869]: I1001 15:21:57.199770 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 01 15:21:57 crc kubenswrapper[4869]: I1001 15:21:57.200104 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5d724a70-eb91-4679-9f86-c3d1e874bdc6-kube-api-access-xqfsv" (OuterVolumeSpecName: "kube-api-access-xqfsv") pod "5d724a70-eb91-4679-9f86-c3d1e874bdc6" (UID: "5d724a70-eb91-4679-9f86-c3d1e874bdc6"). InnerVolumeSpecName "kube-api-access-xqfsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:21:57 crc kubenswrapper[4869]: I1001 15:21:57.222474 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5d724a70-eb91-4679-9f86-c3d1e874bdc6-scripts" (OuterVolumeSpecName: "scripts") pod "5d724a70-eb91-4679-9f86-c3d1e874bdc6" (UID: "5d724a70-eb91-4679-9f86-c3d1e874bdc6"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:21:57 crc kubenswrapper[4869]: I1001 15:21:57.238527 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5d724a70-eb91-4679-9f86-c3d1e874bdc6-config-data" (OuterVolumeSpecName: "config-data") pod "5d724a70-eb91-4679-9f86-c3d1e874bdc6" (UID: "5d724a70-eb91-4679-9f86-c3d1e874bdc6"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:21:57 crc kubenswrapper[4869]: I1001 15:21:57.247976 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5d724a70-eb91-4679-9f86-c3d1e874bdc6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5d724a70-eb91-4679-9f86-c3d1e874bdc6" (UID: "5d724a70-eb91-4679-9f86-c3d1e874bdc6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:21:57 crc kubenswrapper[4869]: I1001 15:21:57.249778 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-g8tmw" Oct 01 15:21:57 crc kubenswrapper[4869]: I1001 15:21:57.296664 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d7bd9313-7a67-41cd-9ab7-efd58f5ab44f-scripts\") pod \"d7bd9313-7a67-41cd-9ab7-efd58f5ab44f\" (UID: \"d7bd9313-7a67-41cd-9ab7-efd58f5ab44f\") " Oct 01 15:21:57 crc kubenswrapper[4869]: I1001 15:21:57.296843 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7bd9313-7a67-41cd-9ab7-efd58f5ab44f-combined-ca-bundle\") pod \"d7bd9313-7a67-41cd-9ab7-efd58f5ab44f\" (UID: \"d7bd9313-7a67-41cd-9ab7-efd58f5ab44f\") " Oct 01 15:21:57 crc kubenswrapper[4869]: I1001 15:21:57.296886 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d7bd9313-7a67-41cd-9ab7-efd58f5ab44f-config-data\") pod \"d7bd9313-7a67-41cd-9ab7-efd58f5ab44f\" (UID: \"d7bd9313-7a67-41cd-9ab7-efd58f5ab44f\") " Oct 01 15:21:57 crc kubenswrapper[4869]: I1001 15:21:57.296964 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8kxl7\" (UniqueName: \"kubernetes.io/projected/d7bd9313-7a67-41cd-9ab7-efd58f5ab44f-kube-api-access-8kxl7\") pod \"d7bd9313-7a67-41cd-9ab7-efd58f5ab44f\" (UID: \"d7bd9313-7a67-41cd-9ab7-efd58f5ab44f\") " Oct 01 15:21:57 crc kubenswrapper[4869]: I1001 15:21:57.297976 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xqfsv\" (UniqueName: \"kubernetes.io/projected/5d724a70-eb91-4679-9f86-c3d1e874bdc6-kube-api-access-xqfsv\") on node \"crc\" DevicePath \"\"" Oct 01 15:21:57 crc kubenswrapper[4869]: I1001 15:21:57.297995 4869 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5d724a70-eb91-4679-9f86-c3d1e874bdc6-scripts\") on node \"crc\" DevicePath \"\"" Oct 01 15:21:57 crc kubenswrapper[4869]: I1001 15:21:57.298022 4869 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5d724a70-eb91-4679-9f86-c3d1e874bdc6-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 15:21:57 crc kubenswrapper[4869]: I1001 15:21:57.298031 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d724a70-eb91-4679-9f86-c3d1e874bdc6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 15:21:57 crc kubenswrapper[4869]: I1001 15:21:57.300084 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d7bd9313-7a67-41cd-9ab7-efd58f5ab44f-kube-api-access-8kxl7" (OuterVolumeSpecName: "kube-api-access-8kxl7") pod "d7bd9313-7a67-41cd-9ab7-efd58f5ab44f" (UID: "d7bd9313-7a67-41cd-9ab7-efd58f5ab44f"). 
InnerVolumeSpecName "kube-api-access-8kxl7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:21:57 crc kubenswrapper[4869]: I1001 15:21:57.303279 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d7bd9313-7a67-41cd-9ab7-efd58f5ab44f-scripts" (OuterVolumeSpecName: "scripts") pod "d7bd9313-7a67-41cd-9ab7-efd58f5ab44f" (UID: "d7bd9313-7a67-41cd-9ab7-efd58f5ab44f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:21:57 crc kubenswrapper[4869]: I1001 15:21:57.324218 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d7bd9313-7a67-41cd-9ab7-efd58f5ab44f-config-data" (OuterVolumeSpecName: "config-data") pod "d7bd9313-7a67-41cd-9ab7-efd58f5ab44f" (UID: "d7bd9313-7a67-41cd-9ab7-efd58f5ab44f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:21:57 crc kubenswrapper[4869]: I1001 15:21:57.324958 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d7bd9313-7a67-41cd-9ab7-efd58f5ab44f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d7bd9313-7a67-41cd-9ab7-efd58f5ab44f" (UID: "d7bd9313-7a67-41cd-9ab7-efd58f5ab44f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:21:57 crc kubenswrapper[4869]: I1001 15:21:57.357087 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Oct 01 15:21:57 crc kubenswrapper[4869]: I1001 15:21:57.390930 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Oct 01 15:21:57 crc kubenswrapper[4869]: I1001 15:21:57.399721 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7bd9313-7a67-41cd-9ab7-efd58f5ab44f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 15:21:57 crc kubenswrapper[4869]: I1001 15:21:57.399759 4869 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d7bd9313-7a67-41cd-9ab7-efd58f5ab44f-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 15:21:57 crc kubenswrapper[4869]: I1001 15:21:57.399772 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8kxl7\" (UniqueName: \"kubernetes.io/projected/d7bd9313-7a67-41cd-9ab7-efd58f5ab44f-kube-api-access-8kxl7\") on node \"crc\" DevicePath \"\"" Oct 01 15:21:57 crc kubenswrapper[4869]: I1001 15:21:57.399786 4869 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d7bd9313-7a67-41cd-9ab7-efd58f5ab44f-scripts\") on node \"crc\" DevicePath \"\"" Oct 01 15:21:57 crc kubenswrapper[4869]: I1001 15:21:57.415463 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-7c57d6658c-9jnss" Oct 01 15:21:57 crc kubenswrapper[4869]: I1001 15:21:57.482208 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-f548b88b9-jqj9f"] Oct 01 15:21:57 crc kubenswrapper[4869]: I1001 15:21:57.482506 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-f548b88b9-jqj9f" podUID="8fdb029f-29fa-47b7-9899-1fd9f14fb383" containerName="dnsmasq-dns" containerID="cri-o://70e0830c58a3ddf1e7d4ee66671f11e4a25d780d6063de9893c2c07ebb04ffc0" gracePeriod=10 Oct 01 15:21:57 crc kubenswrapper[4869]: I1001 
15:21:57.728870 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 01 15:21:57 crc kubenswrapper[4869]: I1001 15:21:57.739628 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="5ede1f51-a56a-4ac8-ba8b-6963803cb869" containerName="ceilometer-central-agent" containerID="cri-o://ee67ced28f4ef58abd0ffbb7214b118c4b5b7ebf87b0bfa49a417b6076fe9eaa" gracePeriod=30 Oct 01 15:21:57 crc kubenswrapper[4869]: I1001 15:21:57.740185 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="5ede1f51-a56a-4ac8-ba8b-6963803cb869" containerName="proxy-httpd" containerID="cri-o://9c7eaec79522726d91645e0f278f0c7e1bc6ccd4fa3ef331cc40a1c9238264b4" gracePeriod=30 Oct 01 15:21:57 crc kubenswrapper[4869]: I1001 15:21:57.740267 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="5ede1f51-a56a-4ac8-ba8b-6963803cb869" containerName="sg-core" containerID="cri-o://2e28667845ee459879144661065b84c9ea1a9d3218470cc0665485c530f3e4c7" gracePeriod=30 Oct 01 15:21:57 crc kubenswrapper[4869]: I1001 15:21:57.740335 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="5ede1f51-a56a-4ac8-ba8b-6963803cb869" containerName="ceilometer-notification-agent" containerID="cri-o://c5a524ecf5caf9bdd7cfd14e0c7b5533b2b4f50b3b3c624bdb38f0b60ac369c4" gracePeriod=30 Oct 01 15:21:57 crc kubenswrapper[4869]: I1001 15:21:57.905526 4869 generic.go:334] "Generic (PLEG): container finished" podID="8fdb029f-29fa-47b7-9899-1fd9f14fb383" containerID="70e0830c58a3ddf1e7d4ee66671f11e4a25d780d6063de9893c2c07ebb04ffc0" exitCode=0 Oct 01 15:21:57 crc kubenswrapper[4869]: I1001 15:21:57.905632 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-f548b88b9-jqj9f" event={"ID":"8fdb029f-29fa-47b7-9899-1fd9f14fb383","Type":"ContainerDied","Data":"70e0830c58a3ddf1e7d4ee66671f11e4a25d780d6063de9893c2c07ebb04ffc0"} Oct 01 15:21:57 crc kubenswrapper[4869]: I1001 15:21:57.933995 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Oct 01 15:21:57 crc kubenswrapper[4869]: E1001 15:21:57.934562 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5d724a70-eb91-4679-9f86-c3d1e874bdc6" containerName="nova-cell1-conductor-db-sync" Oct 01 15:21:57 crc kubenswrapper[4869]: I1001 15:21:57.934586 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="5d724a70-eb91-4679-9f86-c3d1e874bdc6" containerName="nova-cell1-conductor-db-sync" Oct 01 15:21:57 crc kubenswrapper[4869]: E1001 15:21:57.934600 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d7bd9313-7a67-41cd-9ab7-efd58f5ab44f" containerName="nova-manage" Oct 01 15:21:57 crc kubenswrapper[4869]: I1001 15:21:57.934608 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="d7bd9313-7a67-41cd-9ab7-efd58f5ab44f" containerName="nova-manage" Oct 01 15:21:57 crc kubenswrapper[4869]: E1001 15:21:57.934619 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="589bdd72-e961-4bbd-bb04-bcff96363cab" containerName="kube-state-metrics" Oct 01 15:21:57 crc kubenswrapper[4869]: I1001 15:21:57.934627 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="589bdd72-e961-4bbd-bb04-bcff96363cab" containerName="kube-state-metrics" Oct 01 15:21:57 crc kubenswrapper[4869]: I1001 15:21:57.934838 4869 memory_manager.go:354] "RemoveStaleState removing 
state" podUID="589bdd72-e961-4bbd-bb04-bcff96363cab" containerName="kube-state-metrics" Oct 01 15:21:57 crc kubenswrapper[4869]: I1001 15:21:57.934861 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="d7bd9313-7a67-41cd-9ab7-efd58f5ab44f" containerName="nova-manage" Oct 01 15:21:57 crc kubenswrapper[4869]: I1001 15:21:57.934892 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="5d724a70-eb91-4679-9f86-c3d1e874bdc6" containerName="nova-cell1-conductor-db-sync" Oct 01 15:21:57 crc kubenswrapper[4869]: I1001 15:21:57.938705 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 01 15:21:57 crc kubenswrapper[4869]: I1001 15:21:57.939808 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"589bdd72-e961-4bbd-bb04-bcff96363cab","Type":"ContainerDied","Data":"e8b64caed0813fb5ed0821adfcb99cde3a792c21c19b3342252fc0afc3d446fe"} Oct 01 15:21:57 crc kubenswrapper[4869]: I1001 15:21:57.939851 4869 scope.go:117] "RemoveContainer" containerID="796cb2602fbe212a6a0fff1c0b6dfa43ce47db0f54b471aa1a94dfc4b889263d" Oct 01 15:21:57 crc kubenswrapper[4869]: I1001 15:21:57.939989 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Oct 01 15:21:57 crc kubenswrapper[4869]: I1001 15:21:57.950530 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-g8tmw" event={"ID":"d7bd9313-7a67-41cd-9ab7-efd58f5ab44f","Type":"ContainerDied","Data":"905034e920487c6cfe11916cdb75a7101726c1de81006cc8f31de514c7d67923"} Oct 01 15:21:57 crc kubenswrapper[4869]: I1001 15:21:57.950572 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="905034e920487c6cfe11916cdb75a7101726c1de81006cc8f31de514c7d67923" Oct 01 15:21:57 crc kubenswrapper[4869]: I1001 15:21:57.950632 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-g8tmw" Oct 01 15:21:57 crc kubenswrapper[4869]: I1001 15:21:57.953392 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-fmgks" Oct 01 15:21:57 crc kubenswrapper[4869]: I1001 15:21:57.953706 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-fmgks" event={"ID":"5d724a70-eb91-4679-9f86-c3d1e874bdc6","Type":"ContainerDied","Data":"3c6c2b30525e7cd893424c81f57ca55eb47e9c903d88a874e04c8530752e5862"} Oct 01 15:21:57 crc kubenswrapper[4869]: I1001 15:21:57.953752 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3c6c2b30525e7cd893424c81f57ca55eb47e9c903d88a874e04c8530752e5862" Oct 01 15:21:57 crc kubenswrapper[4869]: I1001 15:21:57.969556 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Oct 01 15:21:58 crc kubenswrapper[4869]: I1001 15:21:58.025428 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Oct 01 15:21:58 crc kubenswrapper[4869]: I1001 15:21:58.032371 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f6ff894f-b32f-46d8-9d9c-79c08c97478c-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"f6ff894f-b32f-46d8-9d9c-79c08c97478c\") " pod="openstack/nova-cell1-conductor-0" Oct 01 15:21:58 crc kubenswrapper[4869]: I1001 15:21:58.032404 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f6ff894f-b32f-46d8-9d9c-79c08c97478c-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"f6ff894f-b32f-46d8-9d9c-79c08c97478c\") " pod="openstack/nova-cell1-conductor-0" Oct 01 15:21:58 crc kubenswrapper[4869]: I1001 15:21:58.032544 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b5zxz\" (UniqueName: \"kubernetes.io/projected/f6ff894f-b32f-46d8-9d9c-79c08c97478c-kube-api-access-b5zxz\") pod \"nova-cell1-conductor-0\" (UID: \"f6ff894f-b32f-46d8-9d9c-79c08c97478c\") " pod="openstack/nova-cell1-conductor-0" Oct 01 15:21:58 crc kubenswrapper[4869]: I1001 15:21:58.060844 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 01 15:21:58 crc kubenswrapper[4869]: I1001 15:21:58.061072 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="8c2c4d71-1c1e-4627-8e29-c0ffd9862e25" containerName="nova-api-log" containerID="cri-o://6708f67a419d26f377783bf3934dd3f13f9047abdebda9ca227f52ec8a2d8ab6" gracePeriod=30 Oct 01 15:21:58 crc kubenswrapper[4869]: I1001 15:21:58.061224 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="8c2c4d71-1c1e-4627-8e29-c0ffd9862e25" containerName="nova-api-api" containerID="cri-o://105e1c4e601b7ff9c6cac949694c7065447a874eab57473debf0e76263a8d8dd" gracePeriod=30 Oct 01 15:21:58 crc kubenswrapper[4869]: I1001 15:21:58.088174 4869 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="8c2c4d71-1c1e-4627-8e29-c0ffd9862e25" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.174:8774/\": EOF" Oct 01 15:21:58 crc kubenswrapper[4869]: I1001 15:21:58.088481 4869 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="8c2c4d71-1c1e-4627-8e29-c0ffd9862e25" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.174:8774/\": EOF" Oct 01 
15:21:58 crc kubenswrapper[4869]: I1001 15:21:58.097370 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 01 15:21:58 crc kubenswrapper[4869]: I1001 15:21:58.125326 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 01 15:21:58 crc kubenswrapper[4869]: I1001 15:21:58.136427 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b5zxz\" (UniqueName: \"kubernetes.io/projected/f6ff894f-b32f-46d8-9d9c-79c08c97478c-kube-api-access-b5zxz\") pod \"nova-cell1-conductor-0\" (UID: \"f6ff894f-b32f-46d8-9d9c-79c08c97478c\") " pod="openstack/nova-cell1-conductor-0" Oct 01 15:21:58 crc kubenswrapper[4869]: I1001 15:21:58.136597 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f6ff894f-b32f-46d8-9d9c-79c08c97478c-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"f6ff894f-b32f-46d8-9d9c-79c08c97478c\") " pod="openstack/nova-cell1-conductor-0" Oct 01 15:21:58 crc kubenswrapper[4869]: I1001 15:21:58.136634 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f6ff894f-b32f-46d8-9d9c-79c08c97478c-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"f6ff894f-b32f-46d8-9d9c-79c08c97478c\") " pod="openstack/nova-cell1-conductor-0" Oct 01 15:21:58 crc kubenswrapper[4869]: I1001 15:21:58.150058 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f6ff894f-b32f-46d8-9d9c-79c08c97478c-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"f6ff894f-b32f-46d8-9d9c-79c08c97478c\") " pod="openstack/nova-cell1-conductor-0" Oct 01 15:21:58 crc kubenswrapper[4869]: I1001 15:21:58.152118 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f6ff894f-b32f-46d8-9d9c-79c08c97478c-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"f6ff894f-b32f-46d8-9d9c-79c08c97478c\") " pod="openstack/nova-cell1-conductor-0" Oct 01 15:21:58 crc kubenswrapper[4869]: I1001 15:21:58.176250 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Oct 01 15:21:58 crc kubenswrapper[4869]: I1001 15:21:58.177405 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b5zxz\" (UniqueName: \"kubernetes.io/projected/f6ff894f-b32f-46d8-9d9c-79c08c97478c-kube-api-access-b5zxz\") pod \"nova-cell1-conductor-0\" (UID: \"f6ff894f-b32f-46d8-9d9c-79c08c97478c\") " pod="openstack/nova-cell1-conductor-0" Oct 01 15:21:58 crc kubenswrapper[4869]: I1001 15:21:58.199573 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 01 15:21:58 crc kubenswrapper[4869]: I1001 15:21:58.200426 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="98a767a6-6018-452e-acc7-83c2c5ae1e68" containerName="nova-metadata-metadata" containerID="cri-o://824d481e5db562318cf0b278f7dd15ab28f01bc7d530bc39e39accfe3a48f97c" gracePeriod=30 Oct 01 15:21:58 crc kubenswrapper[4869]: I1001 15:21:58.200935 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="98a767a6-6018-452e-acc7-83c2c5ae1e68" containerName="nova-metadata-log" 
containerID="cri-o://aa2dd2a25a5f0b83290e77156934deef68902d9c3efb435e56ba88c5e422ee33" gracePeriod=30 Oct 01 15:21:58 crc kubenswrapper[4869]: I1001 15:21:58.213463 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-f548b88b9-jqj9f" Oct 01 15:21:58 crc kubenswrapper[4869]: I1001 15:21:58.214369 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Oct 01 15:21:58 crc kubenswrapper[4869]: E1001 15:21:58.214674 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8fdb029f-29fa-47b7-9899-1fd9f14fb383" containerName="dnsmasq-dns" Oct 01 15:21:58 crc kubenswrapper[4869]: I1001 15:21:58.214688 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="8fdb029f-29fa-47b7-9899-1fd9f14fb383" containerName="dnsmasq-dns" Oct 01 15:21:58 crc kubenswrapper[4869]: E1001 15:21:58.214715 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8fdb029f-29fa-47b7-9899-1fd9f14fb383" containerName="init" Oct 01 15:21:58 crc kubenswrapper[4869]: I1001 15:21:58.214722 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="8fdb029f-29fa-47b7-9899-1fd9f14fb383" containerName="init" Oct 01 15:21:58 crc kubenswrapper[4869]: I1001 15:21:58.214887 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="8fdb029f-29fa-47b7-9899-1fd9f14fb383" containerName="dnsmasq-dns" Oct 01 15:21:58 crc kubenswrapper[4869]: I1001 15:21:58.215470 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 01 15:21:58 crc kubenswrapper[4869]: I1001 15:21:58.217800 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"kube-state-metrics-tls-config" Oct 01 15:21:58 crc kubenswrapper[4869]: I1001 15:21:58.217953 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-kube-state-metrics-svc" Oct 01 15:21:58 crc kubenswrapper[4869]: I1001 15:21:58.224114 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 01 15:21:58 crc kubenswrapper[4869]: I1001 15:21:58.224193 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 01 15:21:58 crc kubenswrapper[4869]: I1001 15:21:58.224444 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 01 15:21:58 crc kubenswrapper[4869]: I1001 15:21:58.251108 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/43f59ffb-9d2a-48b9-a6b4-44ca953e1314-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"43f59ffb-9d2a-48b9-a6b4-44ca953e1314\") " pod="openstack/kube-state-metrics-0" Oct 01 15:21:58 crc kubenswrapper[4869]: I1001 15:21:58.251646 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/43f59ffb-9d2a-48b9-a6b4-44ca953e1314-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"43f59ffb-9d2a-48b9-a6b4-44ca953e1314\") " pod="openstack/kube-state-metrics-0" Oct 01 15:21:58 crc kubenswrapper[4869]: I1001 15:21:58.251791 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/43f59ffb-9d2a-48b9-a6b4-44ca953e1314-combined-ca-bundle\") pod 
\"kube-state-metrics-0\" (UID: \"43f59ffb-9d2a-48b9-a6b4-44ca953e1314\") " pod="openstack/kube-state-metrics-0" Oct 01 15:21:58 crc kubenswrapper[4869]: I1001 15:21:58.251993 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cdx4h\" (UniqueName: \"kubernetes.io/projected/43f59ffb-9d2a-48b9-a6b4-44ca953e1314-kube-api-access-cdx4h\") pod \"kube-state-metrics-0\" (UID: \"43f59ffb-9d2a-48b9-a6b4-44ca953e1314\") " pod="openstack/kube-state-metrics-0" Oct 01 15:21:58 crc kubenswrapper[4869]: I1001 15:21:58.349967 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Oct 01 15:21:58 crc kubenswrapper[4869]: I1001 15:21:58.364657 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8fdb029f-29fa-47b7-9899-1fd9f14fb383-ovsdbserver-nb\") pod \"8fdb029f-29fa-47b7-9899-1fd9f14fb383\" (UID: \"8fdb029f-29fa-47b7-9899-1fd9f14fb383\") " Oct 01 15:21:58 crc kubenswrapper[4869]: I1001 15:21:58.364752 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x64zb\" (UniqueName: \"kubernetes.io/projected/8fdb029f-29fa-47b7-9899-1fd9f14fb383-kube-api-access-x64zb\") pod \"8fdb029f-29fa-47b7-9899-1fd9f14fb383\" (UID: \"8fdb029f-29fa-47b7-9899-1fd9f14fb383\") " Oct 01 15:21:58 crc kubenswrapper[4869]: I1001 15:21:58.364812 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8fdb029f-29fa-47b7-9899-1fd9f14fb383-dns-svc\") pod \"8fdb029f-29fa-47b7-9899-1fd9f14fb383\" (UID: \"8fdb029f-29fa-47b7-9899-1fd9f14fb383\") " Oct 01 15:21:58 crc kubenswrapper[4869]: I1001 15:21:58.364836 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8fdb029f-29fa-47b7-9899-1fd9f14fb383-ovsdbserver-sb\") pod \"8fdb029f-29fa-47b7-9899-1fd9f14fb383\" (UID: \"8fdb029f-29fa-47b7-9899-1fd9f14fb383\") " Oct 01 15:21:58 crc kubenswrapper[4869]: I1001 15:21:58.364928 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8fdb029f-29fa-47b7-9899-1fd9f14fb383-config\") pod \"8fdb029f-29fa-47b7-9899-1fd9f14fb383\" (UID: \"8fdb029f-29fa-47b7-9899-1fd9f14fb383\") " Oct 01 15:21:58 crc kubenswrapper[4869]: I1001 15:21:58.365355 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/43f59ffb-9d2a-48b9-a6b4-44ca953e1314-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"43f59ffb-9d2a-48b9-a6b4-44ca953e1314\") " pod="openstack/kube-state-metrics-0" Oct 01 15:21:58 crc kubenswrapper[4869]: I1001 15:21:58.365439 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/43f59ffb-9d2a-48b9-a6b4-44ca953e1314-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"43f59ffb-9d2a-48b9-a6b4-44ca953e1314\") " pod="openstack/kube-state-metrics-0" Oct 01 15:21:58 crc kubenswrapper[4869]: I1001 15:21:58.365466 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/43f59ffb-9d2a-48b9-a6b4-44ca953e1314-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: 
\"43f59ffb-9d2a-48b9-a6b4-44ca953e1314\") " pod="openstack/kube-state-metrics-0" Oct 01 15:21:58 crc kubenswrapper[4869]: I1001 15:21:58.365508 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cdx4h\" (UniqueName: \"kubernetes.io/projected/43f59ffb-9d2a-48b9-a6b4-44ca953e1314-kube-api-access-cdx4h\") pod \"kube-state-metrics-0\" (UID: \"43f59ffb-9d2a-48b9-a6b4-44ca953e1314\") " pod="openstack/kube-state-metrics-0" Oct 01 15:21:58 crc kubenswrapper[4869]: I1001 15:21:58.374390 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8fdb029f-29fa-47b7-9899-1fd9f14fb383-kube-api-access-x64zb" (OuterVolumeSpecName: "kube-api-access-x64zb") pod "8fdb029f-29fa-47b7-9899-1fd9f14fb383" (UID: "8fdb029f-29fa-47b7-9899-1fd9f14fb383"). InnerVolumeSpecName "kube-api-access-x64zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:21:58 crc kubenswrapper[4869]: I1001 15:21:58.376832 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/43f59ffb-9d2a-48b9-a6b4-44ca953e1314-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"43f59ffb-9d2a-48b9-a6b4-44ca953e1314\") " pod="openstack/kube-state-metrics-0" Oct 01 15:21:58 crc kubenswrapper[4869]: I1001 15:21:58.383605 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/43f59ffb-9d2a-48b9-a6b4-44ca953e1314-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"43f59ffb-9d2a-48b9-a6b4-44ca953e1314\") " pod="openstack/kube-state-metrics-0" Oct 01 15:21:58 crc kubenswrapper[4869]: I1001 15:21:58.388919 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/43f59ffb-9d2a-48b9-a6b4-44ca953e1314-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"43f59ffb-9d2a-48b9-a6b4-44ca953e1314\") " pod="openstack/kube-state-metrics-0" Oct 01 15:21:58 crc kubenswrapper[4869]: I1001 15:21:58.396098 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cdx4h\" (UniqueName: \"kubernetes.io/projected/43f59ffb-9d2a-48b9-a6b4-44ca953e1314-kube-api-access-cdx4h\") pod \"kube-state-metrics-0\" (UID: \"43f59ffb-9d2a-48b9-a6b4-44ca953e1314\") " pod="openstack/kube-state-metrics-0" Oct 01 15:21:58 crc kubenswrapper[4869]: I1001 15:21:58.427001 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8fdb029f-29fa-47b7-9899-1fd9f14fb383-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "8fdb029f-29fa-47b7-9899-1fd9f14fb383" (UID: "8fdb029f-29fa-47b7-9899-1fd9f14fb383"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:21:58 crc kubenswrapper[4869]: I1001 15:21:58.433219 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8fdb029f-29fa-47b7-9899-1fd9f14fb383-config" (OuterVolumeSpecName: "config") pod "8fdb029f-29fa-47b7-9899-1fd9f14fb383" (UID: "8fdb029f-29fa-47b7-9899-1fd9f14fb383"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:21:58 crc kubenswrapper[4869]: I1001 15:21:58.448451 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8fdb029f-29fa-47b7-9899-1fd9f14fb383-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "8fdb029f-29fa-47b7-9899-1fd9f14fb383" (UID: "8fdb029f-29fa-47b7-9899-1fd9f14fb383"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:21:58 crc kubenswrapper[4869]: I1001 15:21:58.454572 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8fdb029f-29fa-47b7-9899-1fd9f14fb383-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "8fdb029f-29fa-47b7-9899-1fd9f14fb383" (UID: "8fdb029f-29fa-47b7-9899-1fd9f14fb383"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:21:58 crc kubenswrapper[4869]: I1001 15:21:58.467463 4869 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8fdb029f-29fa-47b7-9899-1fd9f14fb383-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 01 15:21:58 crc kubenswrapper[4869]: I1001 15:21:58.467493 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x64zb\" (UniqueName: \"kubernetes.io/projected/8fdb029f-29fa-47b7-9899-1fd9f14fb383-kube-api-access-x64zb\") on node \"crc\" DevicePath \"\"" Oct 01 15:21:58 crc kubenswrapper[4869]: I1001 15:21:58.467508 4869 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8fdb029f-29fa-47b7-9899-1fd9f14fb383-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 01 15:21:58 crc kubenswrapper[4869]: I1001 15:21:58.467520 4869 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8fdb029f-29fa-47b7-9899-1fd9f14fb383-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 01 15:21:58 crc kubenswrapper[4869]: I1001 15:21:58.467531 4869 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8fdb029f-29fa-47b7-9899-1fd9f14fb383-config\") on node \"crc\" DevicePath \"\"" Oct 01 15:21:58 crc kubenswrapper[4869]: I1001 15:21:58.534536 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 01 15:21:58 crc kubenswrapper[4869]: I1001 15:21:58.860771 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 01 15:21:58 crc kubenswrapper[4869]: I1001 15:21:58.880715 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Oct 01 15:21:58 crc kubenswrapper[4869]: I1001 15:21:58.881216 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/98a767a6-6018-452e-acc7-83c2c5ae1e68-combined-ca-bundle\") pod \"98a767a6-6018-452e-acc7-83c2c5ae1e68\" (UID: \"98a767a6-6018-452e-acc7-83c2c5ae1e68\") " Oct 01 15:21:58 crc kubenswrapper[4869]: I1001 15:21:58.881295 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/98a767a6-6018-452e-acc7-83c2c5ae1e68-nova-metadata-tls-certs\") pod \"98a767a6-6018-452e-acc7-83c2c5ae1e68\" (UID: \"98a767a6-6018-452e-acc7-83c2c5ae1e68\") " Oct 01 15:21:58 crc kubenswrapper[4869]: I1001 15:21:58.881515 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/98a767a6-6018-452e-acc7-83c2c5ae1e68-config-data\") pod \"98a767a6-6018-452e-acc7-83c2c5ae1e68\" (UID: \"98a767a6-6018-452e-acc7-83c2c5ae1e68\") " Oct 01 15:21:58 crc kubenswrapper[4869]: I1001 15:21:58.881552 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-thvpg\" (UniqueName: \"kubernetes.io/projected/98a767a6-6018-452e-acc7-83c2c5ae1e68-kube-api-access-thvpg\") pod \"98a767a6-6018-452e-acc7-83c2c5ae1e68\" (UID: \"98a767a6-6018-452e-acc7-83c2c5ae1e68\") " Oct 01 15:21:58 crc kubenswrapper[4869]: I1001 15:21:58.881627 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/98a767a6-6018-452e-acc7-83c2c5ae1e68-logs\") pod \"98a767a6-6018-452e-acc7-83c2c5ae1e68\" (UID: \"98a767a6-6018-452e-acc7-83c2c5ae1e68\") " Oct 01 15:21:58 crc kubenswrapper[4869]: I1001 15:21:58.882311 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/98a767a6-6018-452e-acc7-83c2c5ae1e68-logs" (OuterVolumeSpecName: "logs") pod "98a767a6-6018-452e-acc7-83c2c5ae1e68" (UID: "98a767a6-6018-452e-acc7-83c2c5ae1e68"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 15:21:58 crc kubenswrapper[4869]: W1001 15:21:58.886149 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf6ff894f_b32f_46d8_9d9c_79c08c97478c.slice/crio-568edf740f4ec3d13e595ef7ad205a5de6ecc8b62dc4660fbbb52750999c526b WatchSource:0}: Error finding container 568edf740f4ec3d13e595ef7ad205a5de6ecc8b62dc4660fbbb52750999c526b: Status 404 returned error can't find the container with id 568edf740f4ec3d13e595ef7ad205a5de6ecc8b62dc4660fbbb52750999c526b Oct 01 15:21:58 crc kubenswrapper[4869]: I1001 15:21:58.908643 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/98a767a6-6018-452e-acc7-83c2c5ae1e68-kube-api-access-thvpg" (OuterVolumeSpecName: "kube-api-access-thvpg") pod "98a767a6-6018-452e-acc7-83c2c5ae1e68" (UID: "98a767a6-6018-452e-acc7-83c2c5ae1e68"). InnerVolumeSpecName "kube-api-access-thvpg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:21:58 crc kubenswrapper[4869]: I1001 15:21:58.921426 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/98a767a6-6018-452e-acc7-83c2c5ae1e68-config-data" (OuterVolumeSpecName: "config-data") pod "98a767a6-6018-452e-acc7-83c2c5ae1e68" (UID: "98a767a6-6018-452e-acc7-83c2c5ae1e68"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:21:58 crc kubenswrapper[4869]: I1001 15:21:58.955332 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/98a767a6-6018-452e-acc7-83c2c5ae1e68-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "98a767a6-6018-452e-acc7-83c2c5ae1e68" (UID: "98a767a6-6018-452e-acc7-83c2c5ae1e68"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:21:58 crc kubenswrapper[4869]: I1001 15:21:58.969854 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/98a767a6-6018-452e-acc7-83c2c5ae1e68-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "98a767a6-6018-452e-acc7-83c2c5ae1e68" (UID: "98a767a6-6018-452e-acc7-83c2c5ae1e68"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:21:58 crc kubenswrapper[4869]: I1001 15:21:58.976910 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-f548b88b9-jqj9f" event={"ID":"8fdb029f-29fa-47b7-9899-1fd9f14fb383","Type":"ContainerDied","Data":"a4f6fd6d18b03449fd6ebce38c353704f7f87350c7bda61d312945cd69244f60"} Oct 01 15:21:58 crc kubenswrapper[4869]: I1001 15:21:58.976964 4869 scope.go:117] "RemoveContainer" containerID="70e0830c58a3ddf1e7d4ee66671f11e4a25d780d6063de9893c2c07ebb04ffc0" Oct 01 15:21:58 crc kubenswrapper[4869]: I1001 15:21:58.977077 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-f548b88b9-jqj9f" Oct 01 15:21:58 crc kubenswrapper[4869]: I1001 15:21:58.983723 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/98a767a6-6018-452e-acc7-83c2c5ae1e68-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 15:21:58 crc kubenswrapper[4869]: I1001 15:21:58.983749 4869 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/98a767a6-6018-452e-acc7-83c2c5ae1e68-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 01 15:21:58 crc kubenswrapper[4869]: I1001 15:21:58.983761 4869 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/98a767a6-6018-452e-acc7-83c2c5ae1e68-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 15:21:58 crc kubenswrapper[4869]: I1001 15:21:58.983772 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-thvpg\" (UniqueName: \"kubernetes.io/projected/98a767a6-6018-452e-acc7-83c2c5ae1e68-kube-api-access-thvpg\") on node \"crc\" DevicePath \"\"" Oct 01 15:21:58 crc kubenswrapper[4869]: I1001 15:21:58.983772 4869 generic.go:334] "Generic (PLEG): container finished" podID="5ede1f51-a56a-4ac8-ba8b-6963803cb869" containerID="9c7eaec79522726d91645e0f278f0c7e1bc6ccd4fa3ef331cc40a1c9238264b4" exitCode=0 Oct 01 15:21:58 crc kubenswrapper[4869]: I1001 15:21:58.983782 4869 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/98a767a6-6018-452e-acc7-83c2c5ae1e68-logs\") on node \"crc\" DevicePath \"\"" Oct 01 15:21:58 crc kubenswrapper[4869]: I1001 15:21:58.983801 4869 generic.go:334] "Generic (PLEG): container finished" podID="5ede1f51-a56a-4ac8-ba8b-6963803cb869" containerID="2e28667845ee459879144661065b84c9ea1a9d3218470cc0665485c530f3e4c7" exitCode=2 Oct 01 15:21:58 crc kubenswrapper[4869]: I1001 15:21:58.983812 4869 generic.go:334] "Generic (PLEG): container finished" podID="5ede1f51-a56a-4ac8-ba8b-6963803cb869" containerID="ee67ced28f4ef58abd0ffbb7214b118c4b5b7ebf87b0bfa49a417b6076fe9eaa" exitCode=0 Oct 01 15:21:58 crc kubenswrapper[4869]: I1001 15:21:58.983874 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5ede1f51-a56a-4ac8-ba8b-6963803cb869","Type":"ContainerDied","Data":"9c7eaec79522726d91645e0f278f0c7e1bc6ccd4fa3ef331cc40a1c9238264b4"} Oct 01 15:21:58 crc kubenswrapper[4869]: I1001 15:21:58.983906 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5ede1f51-a56a-4ac8-ba8b-6963803cb869","Type":"ContainerDied","Data":"2e28667845ee459879144661065b84c9ea1a9d3218470cc0665485c530f3e4c7"} Oct 01 15:21:58 crc kubenswrapper[4869]: I1001 15:21:58.983920 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5ede1f51-a56a-4ac8-ba8b-6963803cb869","Type":"ContainerDied","Data":"ee67ced28f4ef58abd0ffbb7214b118c4b5b7ebf87b0bfa49a417b6076fe9eaa"} Oct 01 15:21:58 crc kubenswrapper[4869]: I1001 15:21:58.986050 4869 generic.go:334] "Generic (PLEG): container finished" podID="8c2c4d71-1c1e-4627-8e29-c0ffd9862e25" containerID="6708f67a419d26f377783bf3934dd3f13f9047abdebda9ca227f52ec8a2d8ab6" exitCode=143 Oct 01 15:21:58 crc kubenswrapper[4869]: I1001 15:21:58.986097 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" 
event={"ID":"8c2c4d71-1c1e-4627-8e29-c0ffd9862e25","Type":"ContainerDied","Data":"6708f67a419d26f377783bf3934dd3f13f9047abdebda9ca227f52ec8a2d8ab6"} Oct 01 15:21:58 crc kubenswrapper[4869]: I1001 15:21:58.988105 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"f6ff894f-b32f-46d8-9d9c-79c08c97478c","Type":"ContainerStarted","Data":"568edf740f4ec3d13e595ef7ad205a5de6ecc8b62dc4660fbbb52750999c526b"} Oct 01 15:21:58 crc kubenswrapper[4869]: I1001 15:21:58.989774 4869 generic.go:334] "Generic (PLEG): container finished" podID="98a767a6-6018-452e-acc7-83c2c5ae1e68" containerID="824d481e5db562318cf0b278f7dd15ab28f01bc7d530bc39e39accfe3a48f97c" exitCode=0 Oct 01 15:21:58 crc kubenswrapper[4869]: I1001 15:21:58.989796 4869 generic.go:334] "Generic (PLEG): container finished" podID="98a767a6-6018-452e-acc7-83c2c5ae1e68" containerID="aa2dd2a25a5f0b83290e77156934deef68902d9c3efb435e56ba88c5e422ee33" exitCode=143 Oct 01 15:21:58 crc kubenswrapper[4869]: I1001 15:21:58.990621 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 01 15:21:58 crc kubenswrapper[4869]: I1001 15:21:58.992414 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"98a767a6-6018-452e-acc7-83c2c5ae1e68","Type":"ContainerDied","Data":"824d481e5db562318cf0b278f7dd15ab28f01bc7d530bc39e39accfe3a48f97c"} Oct 01 15:21:58 crc kubenswrapper[4869]: I1001 15:21:58.992443 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"98a767a6-6018-452e-acc7-83c2c5ae1e68","Type":"ContainerDied","Data":"aa2dd2a25a5f0b83290e77156934deef68902d9c3efb435e56ba88c5e422ee33"} Oct 01 15:21:58 crc kubenswrapper[4869]: I1001 15:21:58.992454 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"98a767a6-6018-452e-acc7-83c2c5ae1e68","Type":"ContainerDied","Data":"fccec572c063b2119d7a223de783ba9190ff2a543553da4e9bd8707cc60858f0"} Oct 01 15:21:59 crc kubenswrapper[4869]: I1001 15:21:59.019269 4869 scope.go:117] "RemoveContainer" containerID="14fb375b9e35c03786f11ad8329d185cc78f7f8ebb9896b2e665bf299a67ef22" Oct 01 15:21:59 crc kubenswrapper[4869]: I1001 15:21:59.058411 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 01 15:21:59 crc kubenswrapper[4869]: I1001 15:21:59.064560 4869 scope.go:117] "RemoveContainer" containerID="824d481e5db562318cf0b278f7dd15ab28f01bc7d530bc39e39accfe3a48f97c" Oct 01 15:21:59 crc kubenswrapper[4869]: I1001 15:21:59.065220 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-f548b88b9-jqj9f"] Oct 01 15:21:59 crc kubenswrapper[4869]: I1001 15:21:59.079373 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-f548b88b9-jqj9f"] Oct 01 15:21:59 crc kubenswrapper[4869]: I1001 15:21:59.089427 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 01 15:21:59 crc kubenswrapper[4869]: I1001 15:21:59.101304 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Oct 01 15:21:59 crc kubenswrapper[4869]: I1001 15:21:59.101595 4869 scope.go:117] "RemoveContainer" containerID="aa2dd2a25a5f0b83290e77156934deef68902d9c3efb435e56ba88c5e422ee33" Oct 01 15:21:59 crc kubenswrapper[4869]: I1001 15:21:59.113733 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Oct 01 15:21:59 crc 
kubenswrapper[4869]: E1001 15:21:59.114181 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="98a767a6-6018-452e-acc7-83c2c5ae1e68" containerName="nova-metadata-metadata" Oct 01 15:21:59 crc kubenswrapper[4869]: I1001 15:21:59.114194 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="98a767a6-6018-452e-acc7-83c2c5ae1e68" containerName="nova-metadata-metadata" Oct 01 15:21:59 crc kubenswrapper[4869]: E1001 15:21:59.114231 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="98a767a6-6018-452e-acc7-83c2c5ae1e68" containerName="nova-metadata-log" Oct 01 15:21:59 crc kubenswrapper[4869]: I1001 15:21:59.114238 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="98a767a6-6018-452e-acc7-83c2c5ae1e68" containerName="nova-metadata-log" Oct 01 15:21:59 crc kubenswrapper[4869]: I1001 15:21:59.115702 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="98a767a6-6018-452e-acc7-83c2c5ae1e68" containerName="nova-metadata-metadata" Oct 01 15:21:59 crc kubenswrapper[4869]: I1001 15:21:59.115732 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="98a767a6-6018-452e-acc7-83c2c5ae1e68" containerName="nova-metadata-log" Oct 01 15:21:59 crc kubenswrapper[4869]: I1001 15:21:59.117057 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 01 15:21:59 crc kubenswrapper[4869]: I1001 15:21:59.122541 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 01 15:21:59 crc kubenswrapper[4869]: I1001 15:21:59.122656 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Oct 01 15:21:59 crc kubenswrapper[4869]: I1001 15:21:59.122871 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Oct 01 15:21:59 crc kubenswrapper[4869]: I1001 15:21:59.149630 4869 scope.go:117] "RemoveContainer" containerID="824d481e5db562318cf0b278f7dd15ab28f01bc7d530bc39e39accfe3a48f97c" Oct 01 15:21:59 crc kubenswrapper[4869]: E1001 15:21:59.150100 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"824d481e5db562318cf0b278f7dd15ab28f01bc7d530bc39e39accfe3a48f97c\": container with ID starting with 824d481e5db562318cf0b278f7dd15ab28f01bc7d530bc39e39accfe3a48f97c not found: ID does not exist" containerID="824d481e5db562318cf0b278f7dd15ab28f01bc7d530bc39e39accfe3a48f97c" Oct 01 15:21:59 crc kubenswrapper[4869]: I1001 15:21:59.150135 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"824d481e5db562318cf0b278f7dd15ab28f01bc7d530bc39e39accfe3a48f97c"} err="failed to get container status \"824d481e5db562318cf0b278f7dd15ab28f01bc7d530bc39e39accfe3a48f97c\": rpc error: code = NotFound desc = could not find container \"824d481e5db562318cf0b278f7dd15ab28f01bc7d530bc39e39accfe3a48f97c\": container with ID starting with 824d481e5db562318cf0b278f7dd15ab28f01bc7d530bc39e39accfe3a48f97c not found: ID does not exist" Oct 01 15:21:59 crc kubenswrapper[4869]: I1001 15:21:59.150156 4869 scope.go:117] "RemoveContainer" containerID="aa2dd2a25a5f0b83290e77156934deef68902d9c3efb435e56ba88c5e422ee33" Oct 01 15:21:59 crc kubenswrapper[4869]: E1001 15:21:59.150674 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aa2dd2a25a5f0b83290e77156934deef68902d9c3efb435e56ba88c5e422ee33\": 
container with ID starting with aa2dd2a25a5f0b83290e77156934deef68902d9c3efb435e56ba88c5e422ee33 not found: ID does not exist" containerID="aa2dd2a25a5f0b83290e77156934deef68902d9c3efb435e56ba88c5e422ee33" Oct 01 15:21:59 crc kubenswrapper[4869]: I1001 15:21:59.150703 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aa2dd2a25a5f0b83290e77156934deef68902d9c3efb435e56ba88c5e422ee33"} err="failed to get container status \"aa2dd2a25a5f0b83290e77156934deef68902d9c3efb435e56ba88c5e422ee33\": rpc error: code = NotFound desc = could not find container \"aa2dd2a25a5f0b83290e77156934deef68902d9c3efb435e56ba88c5e422ee33\": container with ID starting with aa2dd2a25a5f0b83290e77156934deef68902d9c3efb435e56ba88c5e422ee33 not found: ID does not exist" Oct 01 15:21:59 crc kubenswrapper[4869]: I1001 15:21:59.150723 4869 scope.go:117] "RemoveContainer" containerID="824d481e5db562318cf0b278f7dd15ab28f01bc7d530bc39e39accfe3a48f97c" Oct 01 15:21:59 crc kubenswrapper[4869]: I1001 15:21:59.150974 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"824d481e5db562318cf0b278f7dd15ab28f01bc7d530bc39e39accfe3a48f97c"} err="failed to get container status \"824d481e5db562318cf0b278f7dd15ab28f01bc7d530bc39e39accfe3a48f97c\": rpc error: code = NotFound desc = could not find container \"824d481e5db562318cf0b278f7dd15ab28f01bc7d530bc39e39accfe3a48f97c\": container with ID starting with 824d481e5db562318cf0b278f7dd15ab28f01bc7d530bc39e39accfe3a48f97c not found: ID does not exist" Oct 01 15:21:59 crc kubenswrapper[4869]: I1001 15:21:59.150997 4869 scope.go:117] "RemoveContainer" containerID="aa2dd2a25a5f0b83290e77156934deef68902d9c3efb435e56ba88c5e422ee33" Oct 01 15:21:59 crc kubenswrapper[4869]: I1001 15:21:59.151581 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aa2dd2a25a5f0b83290e77156934deef68902d9c3efb435e56ba88c5e422ee33"} err="failed to get container status \"aa2dd2a25a5f0b83290e77156934deef68902d9c3efb435e56ba88c5e422ee33\": rpc error: code = NotFound desc = could not find container \"aa2dd2a25a5f0b83290e77156934deef68902d9c3efb435e56ba88c5e422ee33\": container with ID starting with aa2dd2a25a5f0b83290e77156934deef68902d9c3efb435e56ba88c5e422ee33 not found: ID does not exist" Oct 01 15:21:59 crc kubenswrapper[4869]: I1001 15:21:59.192566 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/38137ffc-97e2-4517-ac14-42bfc87df875-logs\") pod \"nova-metadata-0\" (UID: \"38137ffc-97e2-4517-ac14-42bfc87df875\") " pod="openstack/nova-metadata-0" Oct 01 15:21:59 crc kubenswrapper[4869]: I1001 15:21:59.192657 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/38137ffc-97e2-4517-ac14-42bfc87df875-config-data\") pod \"nova-metadata-0\" (UID: \"38137ffc-97e2-4517-ac14-42bfc87df875\") " pod="openstack/nova-metadata-0" Oct 01 15:21:59 crc kubenswrapper[4869]: I1001 15:21:59.192690 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/38137ffc-97e2-4517-ac14-42bfc87df875-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"38137ffc-97e2-4517-ac14-42bfc87df875\") " pod="openstack/nova-metadata-0" Oct 01 15:21:59 crc kubenswrapper[4869]: I1001 15:21:59.192726 4869 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xjbdw\" (UniqueName: \"kubernetes.io/projected/38137ffc-97e2-4517-ac14-42bfc87df875-kube-api-access-xjbdw\") pod \"nova-metadata-0\" (UID: \"38137ffc-97e2-4517-ac14-42bfc87df875\") " pod="openstack/nova-metadata-0" Oct 01 15:21:59 crc kubenswrapper[4869]: I1001 15:21:59.192747 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/38137ffc-97e2-4517-ac14-42bfc87df875-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"38137ffc-97e2-4517-ac14-42bfc87df875\") " pod="openstack/nova-metadata-0" Oct 01 15:21:59 crc kubenswrapper[4869]: I1001 15:21:59.294299 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/38137ffc-97e2-4517-ac14-42bfc87df875-logs\") pod \"nova-metadata-0\" (UID: \"38137ffc-97e2-4517-ac14-42bfc87df875\") " pod="openstack/nova-metadata-0" Oct 01 15:21:59 crc kubenswrapper[4869]: I1001 15:21:59.294393 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/38137ffc-97e2-4517-ac14-42bfc87df875-config-data\") pod \"nova-metadata-0\" (UID: \"38137ffc-97e2-4517-ac14-42bfc87df875\") " pod="openstack/nova-metadata-0" Oct 01 15:21:59 crc kubenswrapper[4869]: I1001 15:21:59.294432 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/38137ffc-97e2-4517-ac14-42bfc87df875-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"38137ffc-97e2-4517-ac14-42bfc87df875\") " pod="openstack/nova-metadata-0" Oct 01 15:21:59 crc kubenswrapper[4869]: I1001 15:21:59.294471 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xjbdw\" (UniqueName: \"kubernetes.io/projected/38137ffc-97e2-4517-ac14-42bfc87df875-kube-api-access-xjbdw\") pod \"nova-metadata-0\" (UID: \"38137ffc-97e2-4517-ac14-42bfc87df875\") " pod="openstack/nova-metadata-0" Oct 01 15:21:59 crc kubenswrapper[4869]: I1001 15:21:59.294492 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/38137ffc-97e2-4517-ac14-42bfc87df875-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"38137ffc-97e2-4517-ac14-42bfc87df875\") " pod="openstack/nova-metadata-0" Oct 01 15:21:59 crc kubenswrapper[4869]: I1001 15:21:59.294799 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/38137ffc-97e2-4517-ac14-42bfc87df875-logs\") pod \"nova-metadata-0\" (UID: \"38137ffc-97e2-4517-ac14-42bfc87df875\") " pod="openstack/nova-metadata-0" Oct 01 15:21:59 crc kubenswrapper[4869]: I1001 15:21:59.299194 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/38137ffc-97e2-4517-ac14-42bfc87df875-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"38137ffc-97e2-4517-ac14-42bfc87df875\") " pod="openstack/nova-metadata-0" Oct 01 15:21:59 crc kubenswrapper[4869]: I1001 15:21:59.299878 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/38137ffc-97e2-4517-ac14-42bfc87df875-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: 
\"38137ffc-97e2-4517-ac14-42bfc87df875\") " pod="openstack/nova-metadata-0" Oct 01 15:21:59 crc kubenswrapper[4869]: I1001 15:21:59.300234 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/38137ffc-97e2-4517-ac14-42bfc87df875-config-data\") pod \"nova-metadata-0\" (UID: \"38137ffc-97e2-4517-ac14-42bfc87df875\") " pod="openstack/nova-metadata-0" Oct 01 15:21:59 crc kubenswrapper[4869]: I1001 15:21:59.314759 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xjbdw\" (UniqueName: \"kubernetes.io/projected/38137ffc-97e2-4517-ac14-42bfc87df875-kube-api-access-xjbdw\") pod \"nova-metadata-0\" (UID: \"38137ffc-97e2-4517-ac14-42bfc87df875\") " pod="openstack/nova-metadata-0" Oct 01 15:21:59 crc kubenswrapper[4869]: I1001 15:21:59.450012 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 01 15:21:59 crc kubenswrapper[4869]: I1001 15:21:59.595196 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="589bdd72-e961-4bbd-bb04-bcff96363cab" path="/var/lib/kubelet/pods/589bdd72-e961-4bbd-bb04-bcff96363cab/volumes" Oct 01 15:21:59 crc kubenswrapper[4869]: I1001 15:21:59.595906 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8fdb029f-29fa-47b7-9899-1fd9f14fb383" path="/var/lib/kubelet/pods/8fdb029f-29fa-47b7-9899-1fd9f14fb383/volumes" Oct 01 15:21:59 crc kubenswrapper[4869]: I1001 15:21:59.596880 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="98a767a6-6018-452e-acc7-83c2c5ae1e68" path="/var/lib/kubelet/pods/98a767a6-6018-452e-acc7-83c2c5ae1e68/volumes" Oct 01 15:21:59 crc kubenswrapper[4869]: I1001 15:21:59.926352 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 01 15:21:59 crc kubenswrapper[4869]: W1001 15:21:59.942173 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod38137ffc_97e2_4517_ac14_42bfc87df875.slice/crio-c9bbde8a54ec6054719650948ad4e94c0d26195fe97a2411f1d973ead3cd9d93 WatchSource:0}: Error finding container c9bbde8a54ec6054719650948ad4e94c0d26195fe97a2411f1d973ead3cd9d93: Status 404 returned error can't find the container with id c9bbde8a54ec6054719650948ad4e94c0d26195fe97a2411f1d973ead3cd9d93 Oct 01 15:22:00 crc kubenswrapper[4869]: I1001 15:22:00.008676 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"43f59ffb-9d2a-48b9-a6b4-44ca953e1314","Type":"ContainerStarted","Data":"95442ec10eca0a989f316cd5639b8d3d73a56a018660bc5a59601165267e7b1a"} Oct 01 15:22:00 crc kubenswrapper[4869]: I1001 15:22:00.008717 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"43f59ffb-9d2a-48b9-a6b4-44ca953e1314","Type":"ContainerStarted","Data":"14b9f95195a5027eb1e64ac1a548b277410e1743d39f812ded5a4858e582865e"} Oct 01 15:22:00 crc kubenswrapper[4869]: I1001 15:22:00.008750 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Oct 01 15:22:00 crc kubenswrapper[4869]: I1001 15:22:00.010939 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"38137ffc-97e2-4517-ac14-42bfc87df875","Type":"ContainerStarted","Data":"c9bbde8a54ec6054719650948ad4e94c0d26195fe97a2411f1d973ead3cd9d93"} Oct 01 15:22:00 crc kubenswrapper[4869]: I1001 15:22:00.012889 
4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"f6ff894f-b32f-46d8-9d9c-79c08c97478c","Type":"ContainerStarted","Data":"98c383bc860a9ab5dee9b262fd6bc01fc9c7f50c4fd484ffe91375a89ffd4284"} Oct 01 15:22:00 crc kubenswrapper[4869]: I1001 15:22:00.012994 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="34744dc3-77cb-4347-b1ad-fbce05019042" containerName="nova-scheduler-scheduler" containerID="cri-o://a2e13a113629984ae09ec5f542825035d53a85440900d718364efe8dda104458" gracePeriod=30 Oct 01 15:22:00 crc kubenswrapper[4869]: I1001 15:22:00.043993 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=1.683637925 podStartE2EDuration="2.043976766s" podCreationTimestamp="2025-10-01 15:21:58 +0000 UTC" firstStartedPulling="2025-10-01 15:21:59.074612703 +0000 UTC m=+1028.221455819" lastFinishedPulling="2025-10-01 15:21:59.434951544 +0000 UTC m=+1028.581794660" observedRunningTime="2025-10-01 15:22:00.024246088 +0000 UTC m=+1029.171089214" watchObservedRunningTime="2025-10-01 15:22:00.043976766 +0000 UTC m=+1029.190819982" Oct 01 15:22:00 crc kubenswrapper[4869]: I1001 15:22:00.051273 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=3.05124537 podStartE2EDuration="3.05124537s" podCreationTimestamp="2025-10-01 15:21:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:22:00.037313748 +0000 UTC m=+1029.184156874" watchObservedRunningTime="2025-10-01 15:22:00.05124537 +0000 UTC m=+1029.198088506" Oct 01 15:22:01 crc kubenswrapper[4869]: I1001 15:22:01.027360 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"38137ffc-97e2-4517-ac14-42bfc87df875","Type":"ContainerStarted","Data":"6c1a4147750e3cb22d93d1cdad0d418617d251192c68dcfcc455e4556ee1e139"} Oct 01 15:22:01 crc kubenswrapper[4869]: I1001 15:22:01.027702 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"38137ffc-97e2-4517-ac14-42bfc87df875","Type":"ContainerStarted","Data":"1000a8aaedaaf024cc396a8c5f36dd0099d653e970e541963855eab7a6db8ecf"} Oct 01 15:22:01 crc kubenswrapper[4869]: I1001 15:22:01.027744 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Oct 01 15:22:01 crc kubenswrapper[4869]: I1001 15:22:01.056552 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.05652717 podStartE2EDuration="2.05652717s" podCreationTimestamp="2025-10-01 15:21:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:22:01.047222555 +0000 UTC m=+1030.194065671" watchObservedRunningTime="2025-10-01 15:22:01.05652717 +0000 UTC m=+1030.203370286" Oct 01 15:22:01 crc kubenswrapper[4869]: I1001 15:22:01.667401 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 01 15:22:01 crc kubenswrapper[4869]: I1001 15:22:01.755887 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5ede1f51-a56a-4ac8-ba8b-6963803cb869-config-data\") pod \"5ede1f51-a56a-4ac8-ba8b-6963803cb869\" (UID: \"5ede1f51-a56a-4ac8-ba8b-6963803cb869\") " Oct 01 15:22:01 crc kubenswrapper[4869]: I1001 15:22:01.756208 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5ede1f51-a56a-4ac8-ba8b-6963803cb869-sg-core-conf-yaml\") pod \"5ede1f51-a56a-4ac8-ba8b-6963803cb869\" (UID: \"5ede1f51-a56a-4ac8-ba8b-6963803cb869\") " Oct 01 15:22:01 crc kubenswrapper[4869]: I1001 15:22:01.756396 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5ede1f51-a56a-4ac8-ba8b-6963803cb869-log-httpd\") pod \"5ede1f51-a56a-4ac8-ba8b-6963803cb869\" (UID: \"5ede1f51-a56a-4ac8-ba8b-6963803cb869\") " Oct 01 15:22:01 crc kubenswrapper[4869]: I1001 15:22:01.756440 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5ede1f51-a56a-4ac8-ba8b-6963803cb869-combined-ca-bundle\") pod \"5ede1f51-a56a-4ac8-ba8b-6963803cb869\" (UID: \"5ede1f51-a56a-4ac8-ba8b-6963803cb869\") " Oct 01 15:22:01 crc kubenswrapper[4869]: I1001 15:22:01.756520 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5ede1f51-a56a-4ac8-ba8b-6963803cb869-scripts\") pod \"5ede1f51-a56a-4ac8-ba8b-6963803cb869\" (UID: \"5ede1f51-a56a-4ac8-ba8b-6963803cb869\") " Oct 01 15:22:01 crc kubenswrapper[4869]: I1001 15:22:01.756756 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bngb8\" (UniqueName: \"kubernetes.io/projected/5ede1f51-a56a-4ac8-ba8b-6963803cb869-kube-api-access-bngb8\") pod \"5ede1f51-a56a-4ac8-ba8b-6963803cb869\" (UID: \"5ede1f51-a56a-4ac8-ba8b-6963803cb869\") " Oct 01 15:22:01 crc kubenswrapper[4869]: I1001 15:22:01.756873 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5ede1f51-a56a-4ac8-ba8b-6963803cb869-run-httpd\") pod \"5ede1f51-a56a-4ac8-ba8b-6963803cb869\" (UID: \"5ede1f51-a56a-4ac8-ba8b-6963803cb869\") " Oct 01 15:22:01 crc kubenswrapper[4869]: I1001 15:22:01.756975 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5ede1f51-a56a-4ac8-ba8b-6963803cb869-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "5ede1f51-a56a-4ac8-ba8b-6963803cb869" (UID: "5ede1f51-a56a-4ac8-ba8b-6963803cb869"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 15:22:01 crc kubenswrapper[4869]: I1001 15:22:01.757170 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5ede1f51-a56a-4ac8-ba8b-6963803cb869-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "5ede1f51-a56a-4ac8-ba8b-6963803cb869" (UID: "5ede1f51-a56a-4ac8-ba8b-6963803cb869"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 15:22:01 crc kubenswrapper[4869]: I1001 15:22:01.757875 4869 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5ede1f51-a56a-4ac8-ba8b-6963803cb869-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 01 15:22:01 crc kubenswrapper[4869]: I1001 15:22:01.757906 4869 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5ede1f51-a56a-4ac8-ba8b-6963803cb869-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 01 15:22:01 crc kubenswrapper[4869]: I1001 15:22:01.766558 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5ede1f51-a56a-4ac8-ba8b-6963803cb869-kube-api-access-bngb8" (OuterVolumeSpecName: "kube-api-access-bngb8") pod "5ede1f51-a56a-4ac8-ba8b-6963803cb869" (UID: "5ede1f51-a56a-4ac8-ba8b-6963803cb869"). InnerVolumeSpecName "kube-api-access-bngb8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:22:01 crc kubenswrapper[4869]: I1001 15:22:01.778972 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5ede1f51-a56a-4ac8-ba8b-6963803cb869-scripts" (OuterVolumeSpecName: "scripts") pod "5ede1f51-a56a-4ac8-ba8b-6963803cb869" (UID: "5ede1f51-a56a-4ac8-ba8b-6963803cb869"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:22:01 crc kubenswrapper[4869]: I1001 15:22:01.816494 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5ede1f51-a56a-4ac8-ba8b-6963803cb869-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "5ede1f51-a56a-4ac8-ba8b-6963803cb869" (UID: "5ede1f51-a56a-4ac8-ba8b-6963803cb869"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:22:01 crc kubenswrapper[4869]: I1001 15:22:01.848064 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5ede1f51-a56a-4ac8-ba8b-6963803cb869-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5ede1f51-a56a-4ac8-ba8b-6963803cb869" (UID: "5ede1f51-a56a-4ac8-ba8b-6963803cb869"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:22:01 crc kubenswrapper[4869]: I1001 15:22:01.859347 4869 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5ede1f51-a56a-4ac8-ba8b-6963803cb869-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 01 15:22:01 crc kubenswrapper[4869]: I1001 15:22:01.859371 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5ede1f51-a56a-4ac8-ba8b-6963803cb869-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 15:22:01 crc kubenswrapper[4869]: I1001 15:22:01.859380 4869 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5ede1f51-a56a-4ac8-ba8b-6963803cb869-scripts\") on node \"crc\" DevicePath \"\"" Oct 01 15:22:01 crc kubenswrapper[4869]: I1001 15:22:01.859389 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bngb8\" (UniqueName: \"kubernetes.io/projected/5ede1f51-a56a-4ac8-ba8b-6963803cb869-kube-api-access-bngb8\") on node \"crc\" DevicePath \"\"" Oct 01 15:22:01 crc kubenswrapper[4869]: I1001 15:22:01.869224 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5ede1f51-a56a-4ac8-ba8b-6963803cb869-config-data" (OuterVolumeSpecName: "config-data") pod "5ede1f51-a56a-4ac8-ba8b-6963803cb869" (UID: "5ede1f51-a56a-4ac8-ba8b-6963803cb869"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:22:01 crc kubenswrapper[4869]: I1001 15:22:01.961773 4869 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5ede1f51-a56a-4ac8-ba8b-6963803cb869-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 15:22:02 crc kubenswrapper[4869]: I1001 15:22:02.042150 4869 generic.go:334] "Generic (PLEG): container finished" podID="5ede1f51-a56a-4ac8-ba8b-6963803cb869" containerID="c5a524ecf5caf9bdd7cfd14e0c7b5533b2b4f50b3b3c624bdb38f0b60ac369c4" exitCode=0 Oct 01 15:22:02 crc kubenswrapper[4869]: I1001 15:22:02.042230 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5ede1f51-a56a-4ac8-ba8b-6963803cb869","Type":"ContainerDied","Data":"c5a524ecf5caf9bdd7cfd14e0c7b5533b2b4f50b3b3c624bdb38f0b60ac369c4"} Oct 01 15:22:02 crc kubenswrapper[4869]: I1001 15:22:02.042283 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5ede1f51-a56a-4ac8-ba8b-6963803cb869","Type":"ContainerDied","Data":"28bb802fbec15c7baf1cc03344ed450877e1e8071106e0f5e195e01464e60b61"} Oct 01 15:22:02 crc kubenswrapper[4869]: I1001 15:22:02.042301 4869 scope.go:117] "RemoveContainer" containerID="9c7eaec79522726d91645e0f278f0c7e1bc6ccd4fa3ef331cc40a1c9238264b4" Oct 01 15:22:02 crc kubenswrapper[4869]: I1001 15:22:02.042327 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 01 15:22:02 crc kubenswrapper[4869]: I1001 15:22:02.065176 4869 scope.go:117] "RemoveContainer" containerID="2e28667845ee459879144661065b84c9ea1a9d3218470cc0665485c530f3e4c7" Oct 01 15:22:02 crc kubenswrapper[4869]: I1001 15:22:02.080989 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 01 15:22:02 crc kubenswrapper[4869]: I1001 15:22:02.088977 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 01 15:22:02 crc kubenswrapper[4869]: I1001 15:22:02.091203 4869 scope.go:117] "RemoveContainer" containerID="c5a524ecf5caf9bdd7cfd14e0c7b5533b2b4f50b3b3c624bdb38f0b60ac369c4" Oct 01 15:22:02 crc kubenswrapper[4869]: I1001 15:22:02.113128 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 01 15:22:02 crc kubenswrapper[4869]: E1001 15:22:02.113596 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ede1f51-a56a-4ac8-ba8b-6963803cb869" containerName="proxy-httpd" Oct 01 15:22:02 crc kubenswrapper[4869]: I1001 15:22:02.113614 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ede1f51-a56a-4ac8-ba8b-6963803cb869" containerName="proxy-httpd" Oct 01 15:22:02 crc kubenswrapper[4869]: E1001 15:22:02.113628 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ede1f51-a56a-4ac8-ba8b-6963803cb869" containerName="ceilometer-notification-agent" Oct 01 15:22:02 crc kubenswrapper[4869]: I1001 15:22:02.113637 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ede1f51-a56a-4ac8-ba8b-6963803cb869" containerName="ceilometer-notification-agent" Oct 01 15:22:02 crc kubenswrapper[4869]: E1001 15:22:02.113649 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ede1f51-a56a-4ac8-ba8b-6963803cb869" containerName="ceilometer-central-agent" Oct 01 15:22:02 crc kubenswrapper[4869]: I1001 15:22:02.113659 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ede1f51-a56a-4ac8-ba8b-6963803cb869" containerName="ceilometer-central-agent" Oct 01 15:22:02 crc kubenswrapper[4869]: E1001 15:22:02.113706 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ede1f51-a56a-4ac8-ba8b-6963803cb869" containerName="sg-core" Oct 01 15:22:02 crc kubenswrapper[4869]: I1001 15:22:02.113715 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ede1f51-a56a-4ac8-ba8b-6963803cb869" containerName="sg-core" Oct 01 15:22:02 crc kubenswrapper[4869]: I1001 15:22:02.113924 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="5ede1f51-a56a-4ac8-ba8b-6963803cb869" containerName="ceilometer-notification-agent" Oct 01 15:22:02 crc kubenswrapper[4869]: I1001 15:22:02.113959 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="5ede1f51-a56a-4ac8-ba8b-6963803cb869" containerName="ceilometer-central-agent" Oct 01 15:22:02 crc kubenswrapper[4869]: I1001 15:22:02.113976 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="5ede1f51-a56a-4ac8-ba8b-6963803cb869" containerName="proxy-httpd" Oct 01 15:22:02 crc kubenswrapper[4869]: I1001 15:22:02.113990 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="5ede1f51-a56a-4ac8-ba8b-6963803cb869" containerName="sg-core" Oct 01 15:22:02 crc kubenswrapper[4869]: I1001 15:22:02.117205 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 01 15:22:02 crc kubenswrapper[4869]: I1001 15:22:02.119770 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Oct 01 15:22:02 crc kubenswrapper[4869]: I1001 15:22:02.119922 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 01 15:22:02 crc kubenswrapper[4869]: I1001 15:22:02.120118 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 01 15:22:02 crc kubenswrapper[4869]: I1001 15:22:02.122239 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 01 15:22:02 crc kubenswrapper[4869]: I1001 15:22:02.157062 4869 scope.go:117] "RemoveContainer" containerID="ee67ced28f4ef58abd0ffbb7214b118c4b5b7ebf87b0bfa49a417b6076fe9eaa" Oct 01 15:22:02 crc kubenswrapper[4869]: I1001 15:22:02.167008 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd73cdc8-d865-4b29-842c-5f11fe20cf66-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"cd73cdc8-d865-4b29-842c-5f11fe20cf66\") " pod="openstack/ceilometer-0" Oct 01 15:22:02 crc kubenswrapper[4869]: I1001 15:22:02.167253 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/cd73cdc8-d865-4b29-842c-5f11fe20cf66-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"cd73cdc8-d865-4b29-842c-5f11fe20cf66\") " pod="openstack/ceilometer-0" Oct 01 15:22:02 crc kubenswrapper[4869]: I1001 15:22:02.167326 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rq9fj\" (UniqueName: \"kubernetes.io/projected/cd73cdc8-d865-4b29-842c-5f11fe20cf66-kube-api-access-rq9fj\") pod \"ceilometer-0\" (UID: \"cd73cdc8-d865-4b29-842c-5f11fe20cf66\") " pod="openstack/ceilometer-0" Oct 01 15:22:02 crc kubenswrapper[4869]: I1001 15:22:02.167443 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cd73cdc8-d865-4b29-842c-5f11fe20cf66-scripts\") pod \"ceilometer-0\" (UID: \"cd73cdc8-d865-4b29-842c-5f11fe20cf66\") " pod="openstack/ceilometer-0" Oct 01 15:22:02 crc kubenswrapper[4869]: I1001 15:22:02.167591 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cd73cdc8-d865-4b29-842c-5f11fe20cf66-run-httpd\") pod \"ceilometer-0\" (UID: \"cd73cdc8-d865-4b29-842c-5f11fe20cf66\") " pod="openstack/ceilometer-0" Oct 01 15:22:02 crc kubenswrapper[4869]: I1001 15:22:02.167647 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/cd73cdc8-d865-4b29-842c-5f11fe20cf66-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"cd73cdc8-d865-4b29-842c-5f11fe20cf66\") " pod="openstack/ceilometer-0" Oct 01 15:22:02 crc kubenswrapper[4869]: I1001 15:22:02.167731 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cd73cdc8-d865-4b29-842c-5f11fe20cf66-config-data\") pod \"ceilometer-0\" (UID: \"cd73cdc8-d865-4b29-842c-5f11fe20cf66\") " pod="openstack/ceilometer-0" Oct 01 15:22:02 crc kubenswrapper[4869]: 
I1001 15:22:02.167782 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cd73cdc8-d865-4b29-842c-5f11fe20cf66-log-httpd\") pod \"ceilometer-0\" (UID: \"cd73cdc8-d865-4b29-842c-5f11fe20cf66\") " pod="openstack/ceilometer-0" Oct 01 15:22:02 crc kubenswrapper[4869]: I1001 15:22:02.184114 4869 scope.go:117] "RemoveContainer" containerID="9c7eaec79522726d91645e0f278f0c7e1bc6ccd4fa3ef331cc40a1c9238264b4" Oct 01 15:22:02 crc kubenswrapper[4869]: E1001 15:22:02.184404 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9c7eaec79522726d91645e0f278f0c7e1bc6ccd4fa3ef331cc40a1c9238264b4\": container with ID starting with 9c7eaec79522726d91645e0f278f0c7e1bc6ccd4fa3ef331cc40a1c9238264b4 not found: ID does not exist" containerID="9c7eaec79522726d91645e0f278f0c7e1bc6ccd4fa3ef331cc40a1c9238264b4" Oct 01 15:22:02 crc kubenswrapper[4869]: I1001 15:22:02.184434 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9c7eaec79522726d91645e0f278f0c7e1bc6ccd4fa3ef331cc40a1c9238264b4"} err="failed to get container status \"9c7eaec79522726d91645e0f278f0c7e1bc6ccd4fa3ef331cc40a1c9238264b4\": rpc error: code = NotFound desc = could not find container \"9c7eaec79522726d91645e0f278f0c7e1bc6ccd4fa3ef331cc40a1c9238264b4\": container with ID starting with 9c7eaec79522726d91645e0f278f0c7e1bc6ccd4fa3ef331cc40a1c9238264b4 not found: ID does not exist" Oct 01 15:22:02 crc kubenswrapper[4869]: I1001 15:22:02.184454 4869 scope.go:117] "RemoveContainer" containerID="2e28667845ee459879144661065b84c9ea1a9d3218470cc0665485c530f3e4c7" Oct 01 15:22:02 crc kubenswrapper[4869]: E1001 15:22:02.184785 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2e28667845ee459879144661065b84c9ea1a9d3218470cc0665485c530f3e4c7\": container with ID starting with 2e28667845ee459879144661065b84c9ea1a9d3218470cc0665485c530f3e4c7 not found: ID does not exist" containerID="2e28667845ee459879144661065b84c9ea1a9d3218470cc0665485c530f3e4c7" Oct 01 15:22:02 crc kubenswrapper[4869]: I1001 15:22:02.184810 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2e28667845ee459879144661065b84c9ea1a9d3218470cc0665485c530f3e4c7"} err="failed to get container status \"2e28667845ee459879144661065b84c9ea1a9d3218470cc0665485c530f3e4c7\": rpc error: code = NotFound desc = could not find container \"2e28667845ee459879144661065b84c9ea1a9d3218470cc0665485c530f3e4c7\": container with ID starting with 2e28667845ee459879144661065b84c9ea1a9d3218470cc0665485c530f3e4c7 not found: ID does not exist" Oct 01 15:22:02 crc kubenswrapper[4869]: I1001 15:22:02.184823 4869 scope.go:117] "RemoveContainer" containerID="c5a524ecf5caf9bdd7cfd14e0c7b5533b2b4f50b3b3c624bdb38f0b60ac369c4" Oct 01 15:22:02 crc kubenswrapper[4869]: E1001 15:22:02.184984 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c5a524ecf5caf9bdd7cfd14e0c7b5533b2b4f50b3b3c624bdb38f0b60ac369c4\": container with ID starting with c5a524ecf5caf9bdd7cfd14e0c7b5533b2b4f50b3b3c624bdb38f0b60ac369c4 not found: ID does not exist" containerID="c5a524ecf5caf9bdd7cfd14e0c7b5533b2b4f50b3b3c624bdb38f0b60ac369c4" Oct 01 15:22:02 crc kubenswrapper[4869]: I1001 15:22:02.185003 4869 pod_container_deletor.go:53] 
"DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c5a524ecf5caf9bdd7cfd14e0c7b5533b2b4f50b3b3c624bdb38f0b60ac369c4"} err="failed to get container status \"c5a524ecf5caf9bdd7cfd14e0c7b5533b2b4f50b3b3c624bdb38f0b60ac369c4\": rpc error: code = NotFound desc = could not find container \"c5a524ecf5caf9bdd7cfd14e0c7b5533b2b4f50b3b3c624bdb38f0b60ac369c4\": container with ID starting with c5a524ecf5caf9bdd7cfd14e0c7b5533b2b4f50b3b3c624bdb38f0b60ac369c4 not found: ID does not exist" Oct 01 15:22:02 crc kubenswrapper[4869]: I1001 15:22:02.185024 4869 scope.go:117] "RemoveContainer" containerID="ee67ced28f4ef58abd0ffbb7214b118c4b5b7ebf87b0bfa49a417b6076fe9eaa" Oct 01 15:22:02 crc kubenswrapper[4869]: E1001 15:22:02.185250 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ee67ced28f4ef58abd0ffbb7214b118c4b5b7ebf87b0bfa49a417b6076fe9eaa\": container with ID starting with ee67ced28f4ef58abd0ffbb7214b118c4b5b7ebf87b0bfa49a417b6076fe9eaa not found: ID does not exist" containerID="ee67ced28f4ef58abd0ffbb7214b118c4b5b7ebf87b0bfa49a417b6076fe9eaa" Oct 01 15:22:02 crc kubenswrapper[4869]: I1001 15:22:02.185285 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ee67ced28f4ef58abd0ffbb7214b118c4b5b7ebf87b0bfa49a417b6076fe9eaa"} err="failed to get container status \"ee67ced28f4ef58abd0ffbb7214b118c4b5b7ebf87b0bfa49a417b6076fe9eaa\": rpc error: code = NotFound desc = could not find container \"ee67ced28f4ef58abd0ffbb7214b118c4b5b7ebf87b0bfa49a417b6076fe9eaa\": container with ID starting with ee67ced28f4ef58abd0ffbb7214b118c4b5b7ebf87b0bfa49a417b6076fe9eaa not found: ID does not exist" Oct 01 15:22:02 crc kubenswrapper[4869]: I1001 15:22:02.269934 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/cd73cdc8-d865-4b29-842c-5f11fe20cf66-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"cd73cdc8-d865-4b29-842c-5f11fe20cf66\") " pod="openstack/ceilometer-0" Oct 01 15:22:02 crc kubenswrapper[4869]: I1001 15:22:02.270009 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rq9fj\" (UniqueName: \"kubernetes.io/projected/cd73cdc8-d865-4b29-842c-5f11fe20cf66-kube-api-access-rq9fj\") pod \"ceilometer-0\" (UID: \"cd73cdc8-d865-4b29-842c-5f11fe20cf66\") " pod="openstack/ceilometer-0" Oct 01 15:22:02 crc kubenswrapper[4869]: I1001 15:22:02.270079 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cd73cdc8-d865-4b29-842c-5f11fe20cf66-scripts\") pod \"ceilometer-0\" (UID: \"cd73cdc8-d865-4b29-842c-5f11fe20cf66\") " pod="openstack/ceilometer-0" Oct 01 15:22:02 crc kubenswrapper[4869]: I1001 15:22:02.270180 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cd73cdc8-d865-4b29-842c-5f11fe20cf66-run-httpd\") pod \"ceilometer-0\" (UID: \"cd73cdc8-d865-4b29-842c-5f11fe20cf66\") " pod="openstack/ceilometer-0" Oct 01 15:22:02 crc kubenswrapper[4869]: I1001 15:22:02.270231 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/cd73cdc8-d865-4b29-842c-5f11fe20cf66-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"cd73cdc8-d865-4b29-842c-5f11fe20cf66\") " pod="openstack/ceilometer-0" Oct 01 15:22:02 crc 
kubenswrapper[4869]: I1001 15:22:02.270317 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cd73cdc8-d865-4b29-842c-5f11fe20cf66-config-data\") pod \"ceilometer-0\" (UID: \"cd73cdc8-d865-4b29-842c-5f11fe20cf66\") " pod="openstack/ceilometer-0" Oct 01 15:22:02 crc kubenswrapper[4869]: I1001 15:22:02.270375 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cd73cdc8-d865-4b29-842c-5f11fe20cf66-log-httpd\") pod \"ceilometer-0\" (UID: \"cd73cdc8-d865-4b29-842c-5f11fe20cf66\") " pod="openstack/ceilometer-0" Oct 01 15:22:02 crc kubenswrapper[4869]: I1001 15:22:02.270731 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cd73cdc8-d865-4b29-842c-5f11fe20cf66-run-httpd\") pod \"ceilometer-0\" (UID: \"cd73cdc8-d865-4b29-842c-5f11fe20cf66\") " pod="openstack/ceilometer-0" Oct 01 15:22:02 crc kubenswrapper[4869]: I1001 15:22:02.270868 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cd73cdc8-d865-4b29-842c-5f11fe20cf66-log-httpd\") pod \"ceilometer-0\" (UID: \"cd73cdc8-d865-4b29-842c-5f11fe20cf66\") " pod="openstack/ceilometer-0" Oct 01 15:22:02 crc kubenswrapper[4869]: I1001 15:22:02.270940 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd73cdc8-d865-4b29-842c-5f11fe20cf66-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"cd73cdc8-d865-4b29-842c-5f11fe20cf66\") " pod="openstack/ceilometer-0" Oct 01 15:22:02 crc kubenswrapper[4869]: I1001 15:22:02.273204 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/cd73cdc8-d865-4b29-842c-5f11fe20cf66-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"cd73cdc8-d865-4b29-842c-5f11fe20cf66\") " pod="openstack/ceilometer-0" Oct 01 15:22:02 crc kubenswrapper[4869]: I1001 15:22:02.274496 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cd73cdc8-d865-4b29-842c-5f11fe20cf66-scripts\") pod \"ceilometer-0\" (UID: \"cd73cdc8-d865-4b29-842c-5f11fe20cf66\") " pod="openstack/ceilometer-0" Oct 01 15:22:02 crc kubenswrapper[4869]: I1001 15:22:02.274605 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd73cdc8-d865-4b29-842c-5f11fe20cf66-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"cd73cdc8-d865-4b29-842c-5f11fe20cf66\") " pod="openstack/ceilometer-0" Oct 01 15:22:02 crc kubenswrapper[4869]: I1001 15:22:02.274855 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cd73cdc8-d865-4b29-842c-5f11fe20cf66-config-data\") pod \"ceilometer-0\" (UID: \"cd73cdc8-d865-4b29-842c-5f11fe20cf66\") " pod="openstack/ceilometer-0" Oct 01 15:22:02 crc kubenswrapper[4869]: I1001 15:22:02.286995 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/cd73cdc8-d865-4b29-842c-5f11fe20cf66-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"cd73cdc8-d865-4b29-842c-5f11fe20cf66\") " pod="openstack/ceilometer-0" Oct 01 15:22:02 crc kubenswrapper[4869]: I1001 15:22:02.290483 4869 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-rq9fj\" (UniqueName: \"kubernetes.io/projected/cd73cdc8-d865-4b29-842c-5f11fe20cf66-kube-api-access-rq9fj\") pod \"ceilometer-0\" (UID: \"cd73cdc8-d865-4b29-842c-5f11fe20cf66\") " pod="openstack/ceilometer-0" Oct 01 15:22:02 crc kubenswrapper[4869]: E1001 15:22:02.359569 4869 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="a2e13a113629984ae09ec5f542825035d53a85440900d718364efe8dda104458" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Oct 01 15:22:02 crc kubenswrapper[4869]: E1001 15:22:02.361539 4869 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="a2e13a113629984ae09ec5f542825035d53a85440900d718364efe8dda104458" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Oct 01 15:22:02 crc kubenswrapper[4869]: E1001 15:22:02.362814 4869 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="a2e13a113629984ae09ec5f542825035d53a85440900d718364efe8dda104458" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Oct 01 15:22:02 crc kubenswrapper[4869]: E1001 15:22:02.362853 4869 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="34744dc3-77cb-4347-b1ad-fbce05019042" containerName="nova-scheduler-scheduler" Oct 01 15:22:02 crc kubenswrapper[4869]: I1001 15:22:02.446634 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 01 15:22:02 crc kubenswrapper[4869]: I1001 15:22:02.808179 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 01 15:22:02 crc kubenswrapper[4869]: I1001 15:22:02.890304 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Oct 01 15:22:02 crc kubenswrapper[4869]: I1001 15:22:02.982828 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x9klt\" (UniqueName: \"kubernetes.io/projected/34744dc3-77cb-4347-b1ad-fbce05019042-kube-api-access-x9klt\") pod \"34744dc3-77cb-4347-b1ad-fbce05019042\" (UID: \"34744dc3-77cb-4347-b1ad-fbce05019042\") " Oct 01 15:22:02 crc kubenswrapper[4869]: I1001 15:22:02.982917 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/34744dc3-77cb-4347-b1ad-fbce05019042-config-data\") pod \"34744dc3-77cb-4347-b1ad-fbce05019042\" (UID: \"34744dc3-77cb-4347-b1ad-fbce05019042\") " Oct 01 15:22:02 crc kubenswrapper[4869]: I1001 15:22:02.983208 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/34744dc3-77cb-4347-b1ad-fbce05019042-combined-ca-bundle\") pod \"34744dc3-77cb-4347-b1ad-fbce05019042\" (UID: \"34744dc3-77cb-4347-b1ad-fbce05019042\") " Oct 01 15:22:02 crc kubenswrapper[4869]: I1001 15:22:02.989834 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/34744dc3-77cb-4347-b1ad-fbce05019042-kube-api-access-x9klt" (OuterVolumeSpecName: "kube-api-access-x9klt") pod "34744dc3-77cb-4347-b1ad-fbce05019042" (UID: "34744dc3-77cb-4347-b1ad-fbce05019042"). InnerVolumeSpecName "kube-api-access-x9klt". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:22:03 crc kubenswrapper[4869]: I1001 15:22:03.012801 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/34744dc3-77cb-4347-b1ad-fbce05019042-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "34744dc3-77cb-4347-b1ad-fbce05019042" (UID: "34744dc3-77cb-4347-b1ad-fbce05019042"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:22:03 crc kubenswrapper[4869]: I1001 15:22:03.019020 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/34744dc3-77cb-4347-b1ad-fbce05019042-config-data" (OuterVolumeSpecName: "config-data") pod "34744dc3-77cb-4347-b1ad-fbce05019042" (UID: "34744dc3-77cb-4347-b1ad-fbce05019042"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:22:03 crc kubenswrapper[4869]: I1001 15:22:03.053432 4869 generic.go:334] "Generic (PLEG): container finished" podID="34744dc3-77cb-4347-b1ad-fbce05019042" containerID="a2e13a113629984ae09ec5f542825035d53a85440900d718364efe8dda104458" exitCode=0 Oct 01 15:22:03 crc kubenswrapper[4869]: I1001 15:22:03.053507 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"34744dc3-77cb-4347-b1ad-fbce05019042","Type":"ContainerDied","Data":"a2e13a113629984ae09ec5f542825035d53a85440900d718364efe8dda104458"} Oct 01 15:22:03 crc kubenswrapper[4869]: I1001 15:22:03.053531 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"34744dc3-77cb-4347-b1ad-fbce05019042","Type":"ContainerDied","Data":"fd8997f1b8bbbcc09318bea8206733b23ab418dcec564eb3ab71a9c794c6458c"} Oct 01 15:22:03 crc kubenswrapper[4869]: I1001 15:22:03.053548 4869 scope.go:117] "RemoveContainer" containerID="a2e13a113629984ae09ec5f542825035d53a85440900d718364efe8dda104458" Oct 01 15:22:03 crc kubenswrapper[4869]: I1001 15:22:03.053687 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 01 15:22:03 crc kubenswrapper[4869]: I1001 15:22:03.056652 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cd73cdc8-d865-4b29-842c-5f11fe20cf66","Type":"ContainerStarted","Data":"c8792b36e93f76a928a64aa9d72b5ebbec7fca541e4d032c577bc20c068b4711"} Oct 01 15:22:03 crc kubenswrapper[4869]: I1001 15:22:03.092524 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/34744dc3-77cb-4347-b1ad-fbce05019042-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 15:22:03 crc kubenswrapper[4869]: I1001 15:22:03.092562 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x9klt\" (UniqueName: \"kubernetes.io/projected/34744dc3-77cb-4347-b1ad-fbce05019042-kube-api-access-x9klt\") on node \"crc\" DevicePath \"\"" Oct 01 15:22:03 crc kubenswrapper[4869]: I1001 15:22:03.092575 4869 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/34744dc3-77cb-4347-b1ad-fbce05019042-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 15:22:03 crc kubenswrapper[4869]: I1001 15:22:03.104348 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Oct 01 15:22:03 crc kubenswrapper[4869]: I1001 15:22:03.109436 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Oct 01 15:22:03 crc kubenswrapper[4869]: I1001 15:22:03.110607 4869 scope.go:117] "RemoveContainer" containerID="a2e13a113629984ae09ec5f542825035d53a85440900d718364efe8dda104458" Oct 01 15:22:03 crc kubenswrapper[4869]: E1001 15:22:03.111176 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a2e13a113629984ae09ec5f542825035d53a85440900d718364efe8dda104458\": container with ID starting with a2e13a113629984ae09ec5f542825035d53a85440900d718364efe8dda104458 not found: ID does not exist" containerID="a2e13a113629984ae09ec5f542825035d53a85440900d718364efe8dda104458" Oct 01 15:22:03 crc kubenswrapper[4869]: I1001 15:22:03.111210 4869 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"a2e13a113629984ae09ec5f542825035d53a85440900d718364efe8dda104458"} err="failed to get container status \"a2e13a113629984ae09ec5f542825035d53a85440900d718364efe8dda104458\": rpc error: code = NotFound desc = could not find container \"a2e13a113629984ae09ec5f542825035d53a85440900d718364efe8dda104458\": container with ID starting with a2e13a113629984ae09ec5f542825035d53a85440900d718364efe8dda104458 not found: ID does not exist" Oct 01 15:22:03 crc kubenswrapper[4869]: I1001 15:22:03.119114 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Oct 01 15:22:03 crc kubenswrapper[4869]: E1001 15:22:03.119677 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="34744dc3-77cb-4347-b1ad-fbce05019042" containerName="nova-scheduler-scheduler" Oct 01 15:22:03 crc kubenswrapper[4869]: I1001 15:22:03.119708 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="34744dc3-77cb-4347-b1ad-fbce05019042" containerName="nova-scheduler-scheduler" Oct 01 15:22:03 crc kubenswrapper[4869]: I1001 15:22:03.119981 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="34744dc3-77cb-4347-b1ad-fbce05019042" containerName="nova-scheduler-scheduler" Oct 01 15:22:03 crc kubenswrapper[4869]: I1001 15:22:03.120998 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 01 15:22:03 crc kubenswrapper[4869]: I1001 15:22:03.123250 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Oct 01 15:22:03 crc kubenswrapper[4869]: I1001 15:22:03.131706 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 01 15:22:03 crc kubenswrapper[4869]: I1001 15:22:03.176353 4869 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-f548b88b9-jqj9f" podUID="8fdb029f-29fa-47b7-9899-1fd9f14fb383" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.156:5353: i/o timeout" Oct 01 15:22:03 crc kubenswrapper[4869]: I1001 15:22:03.193964 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0c777f8b-6eec-48f7-97a6-137936bfb76c-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"0c777f8b-6eec-48f7-97a6-137936bfb76c\") " pod="openstack/nova-scheduler-0" Oct 01 15:22:03 crc kubenswrapper[4869]: I1001 15:22:03.194015 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0c777f8b-6eec-48f7-97a6-137936bfb76c-config-data\") pod \"nova-scheduler-0\" (UID: \"0c777f8b-6eec-48f7-97a6-137936bfb76c\") " pod="openstack/nova-scheduler-0" Oct 01 15:22:03 crc kubenswrapper[4869]: I1001 15:22:03.194044 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pmnmq\" (UniqueName: \"kubernetes.io/projected/0c777f8b-6eec-48f7-97a6-137936bfb76c-kube-api-access-pmnmq\") pod \"nova-scheduler-0\" (UID: \"0c777f8b-6eec-48f7-97a6-137936bfb76c\") " pod="openstack/nova-scheduler-0" Oct 01 15:22:03 crc kubenswrapper[4869]: I1001 15:22:03.296518 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0c777f8b-6eec-48f7-97a6-137936bfb76c-config-data\") pod \"nova-scheduler-0\" (UID: \"0c777f8b-6eec-48f7-97a6-137936bfb76c\") " 
pod="openstack/nova-scheduler-0" Oct 01 15:22:03 crc kubenswrapper[4869]: I1001 15:22:03.296570 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0c777f8b-6eec-48f7-97a6-137936bfb76c-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"0c777f8b-6eec-48f7-97a6-137936bfb76c\") " pod="openstack/nova-scheduler-0" Oct 01 15:22:03 crc kubenswrapper[4869]: I1001 15:22:03.296599 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pmnmq\" (UniqueName: \"kubernetes.io/projected/0c777f8b-6eec-48f7-97a6-137936bfb76c-kube-api-access-pmnmq\") pod \"nova-scheduler-0\" (UID: \"0c777f8b-6eec-48f7-97a6-137936bfb76c\") " pod="openstack/nova-scheduler-0" Oct 01 15:22:03 crc kubenswrapper[4869]: I1001 15:22:03.303500 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0c777f8b-6eec-48f7-97a6-137936bfb76c-config-data\") pod \"nova-scheduler-0\" (UID: \"0c777f8b-6eec-48f7-97a6-137936bfb76c\") " pod="openstack/nova-scheduler-0" Oct 01 15:22:03 crc kubenswrapper[4869]: I1001 15:22:03.313216 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0c777f8b-6eec-48f7-97a6-137936bfb76c-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"0c777f8b-6eec-48f7-97a6-137936bfb76c\") " pod="openstack/nova-scheduler-0" Oct 01 15:22:03 crc kubenswrapper[4869]: I1001 15:22:03.313373 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pmnmq\" (UniqueName: \"kubernetes.io/projected/0c777f8b-6eec-48f7-97a6-137936bfb76c-kube-api-access-pmnmq\") pod \"nova-scheduler-0\" (UID: \"0c777f8b-6eec-48f7-97a6-137936bfb76c\") " pod="openstack/nova-scheduler-0" Oct 01 15:22:03 crc kubenswrapper[4869]: I1001 15:22:03.441676 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 01 15:22:03 crc kubenswrapper[4869]: I1001 15:22:03.595355 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="34744dc3-77cb-4347-b1ad-fbce05019042" path="/var/lib/kubelet/pods/34744dc3-77cb-4347-b1ad-fbce05019042/volumes" Oct 01 15:22:03 crc kubenswrapper[4869]: I1001 15:22:03.596182 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5ede1f51-a56a-4ac8-ba8b-6963803cb869" path="/var/lib/kubelet/pods/5ede1f51-a56a-4ac8-ba8b-6963803cb869/volumes" Oct 01 15:22:03 crc kubenswrapper[4869]: W1001 15:22:03.879732 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0c777f8b_6eec_48f7_97a6_137936bfb76c.slice/crio-57cf95c75a950531e9fd39a60f9c53837b8484e5e0416a2750dd7d099aed4e0c WatchSource:0}: Error finding container 57cf95c75a950531e9fd39a60f9c53837b8484e5e0416a2750dd7d099aed4e0c: Status 404 returned error can't find the container with id 57cf95c75a950531e9fd39a60f9c53837b8484e5e0416a2750dd7d099aed4e0c Oct 01 15:22:03 crc kubenswrapper[4869]: I1001 15:22:03.882106 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 01 15:22:04 crc kubenswrapper[4869]: I1001 15:22:04.006586 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 01 15:22:04 crc kubenswrapper[4869]: I1001 15:22:04.069450 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cd73cdc8-d865-4b29-842c-5f11fe20cf66","Type":"ContainerStarted","Data":"0477058267af225a9a2b9982eba75a9c4c755894f86330d92a8d2df0993cd5a4"} Oct 01 15:22:04 crc kubenswrapper[4869]: I1001 15:22:04.070594 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"0c777f8b-6eec-48f7-97a6-137936bfb76c","Type":"ContainerStarted","Data":"57cf95c75a950531e9fd39a60f9c53837b8484e5e0416a2750dd7d099aed4e0c"} Oct 01 15:22:04 crc kubenswrapper[4869]: I1001 15:22:04.072001 4869 generic.go:334] "Generic (PLEG): container finished" podID="8c2c4d71-1c1e-4627-8e29-c0ffd9862e25" containerID="105e1c4e601b7ff9c6cac949694c7065447a874eab57473debf0e76263a8d8dd" exitCode=0 Oct 01 15:22:04 crc kubenswrapper[4869]: I1001 15:22:04.072045 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"8c2c4d71-1c1e-4627-8e29-c0ffd9862e25","Type":"ContainerDied","Data":"105e1c4e601b7ff9c6cac949694c7065447a874eab57473debf0e76263a8d8dd"} Oct 01 15:22:04 crc kubenswrapper[4869]: I1001 15:22:04.072062 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"8c2c4d71-1c1e-4627-8e29-c0ffd9862e25","Type":"ContainerDied","Data":"5d2a02c9fe1fe4819802662a50e09100606694dd9dc1b3080cef358edc560066"} Oct 01 15:22:04 crc kubenswrapper[4869]: I1001 15:22:04.072080 4869 scope.go:117] "RemoveContainer" containerID="105e1c4e601b7ff9c6cac949694c7065447a874eab57473debf0e76263a8d8dd" Oct 01 15:22:04 crc kubenswrapper[4869]: I1001 15:22:04.072202 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 01 15:22:04 crc kubenswrapper[4869]: I1001 15:22:04.101785 4869 scope.go:117] "RemoveContainer" containerID="6708f67a419d26f377783bf3934dd3f13f9047abdebda9ca227f52ec8a2d8ab6" Oct 01 15:22:04 crc kubenswrapper[4869]: I1001 15:22:04.113869 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5qbrn\" (UniqueName: \"kubernetes.io/projected/8c2c4d71-1c1e-4627-8e29-c0ffd9862e25-kube-api-access-5qbrn\") pod \"8c2c4d71-1c1e-4627-8e29-c0ffd9862e25\" (UID: \"8c2c4d71-1c1e-4627-8e29-c0ffd9862e25\") " Oct 01 15:22:04 crc kubenswrapper[4869]: I1001 15:22:04.113969 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c2c4d71-1c1e-4627-8e29-c0ffd9862e25-combined-ca-bundle\") pod \"8c2c4d71-1c1e-4627-8e29-c0ffd9862e25\" (UID: \"8c2c4d71-1c1e-4627-8e29-c0ffd9862e25\") " Oct 01 15:22:04 crc kubenswrapper[4869]: I1001 15:22:04.114024 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8c2c4d71-1c1e-4627-8e29-c0ffd9862e25-logs\") pod \"8c2c4d71-1c1e-4627-8e29-c0ffd9862e25\" (UID: \"8c2c4d71-1c1e-4627-8e29-c0ffd9862e25\") " Oct 01 15:22:04 crc kubenswrapper[4869]: I1001 15:22:04.114056 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8c2c4d71-1c1e-4627-8e29-c0ffd9862e25-config-data\") pod \"8c2c4d71-1c1e-4627-8e29-c0ffd9862e25\" (UID: \"8c2c4d71-1c1e-4627-8e29-c0ffd9862e25\") " Oct 01 15:22:04 crc kubenswrapper[4869]: I1001 15:22:04.115655 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8c2c4d71-1c1e-4627-8e29-c0ffd9862e25-logs" (OuterVolumeSpecName: "logs") pod "8c2c4d71-1c1e-4627-8e29-c0ffd9862e25" (UID: "8c2c4d71-1c1e-4627-8e29-c0ffd9862e25"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 15:22:04 crc kubenswrapper[4869]: I1001 15:22:04.122249 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8c2c4d71-1c1e-4627-8e29-c0ffd9862e25-kube-api-access-5qbrn" (OuterVolumeSpecName: "kube-api-access-5qbrn") pod "8c2c4d71-1c1e-4627-8e29-c0ffd9862e25" (UID: "8c2c4d71-1c1e-4627-8e29-c0ffd9862e25"). InnerVolumeSpecName "kube-api-access-5qbrn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:22:04 crc kubenswrapper[4869]: I1001 15:22:04.128635 4869 scope.go:117] "RemoveContainer" containerID="105e1c4e601b7ff9c6cac949694c7065447a874eab57473debf0e76263a8d8dd" Oct 01 15:22:04 crc kubenswrapper[4869]: E1001 15:22:04.129140 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"105e1c4e601b7ff9c6cac949694c7065447a874eab57473debf0e76263a8d8dd\": container with ID starting with 105e1c4e601b7ff9c6cac949694c7065447a874eab57473debf0e76263a8d8dd not found: ID does not exist" containerID="105e1c4e601b7ff9c6cac949694c7065447a874eab57473debf0e76263a8d8dd" Oct 01 15:22:04 crc kubenswrapper[4869]: I1001 15:22:04.129170 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"105e1c4e601b7ff9c6cac949694c7065447a874eab57473debf0e76263a8d8dd"} err="failed to get container status \"105e1c4e601b7ff9c6cac949694c7065447a874eab57473debf0e76263a8d8dd\": rpc error: code = NotFound desc = could not find container \"105e1c4e601b7ff9c6cac949694c7065447a874eab57473debf0e76263a8d8dd\": container with ID starting with 105e1c4e601b7ff9c6cac949694c7065447a874eab57473debf0e76263a8d8dd not found: ID does not exist" Oct 01 15:22:04 crc kubenswrapper[4869]: I1001 15:22:04.129190 4869 scope.go:117] "RemoveContainer" containerID="6708f67a419d26f377783bf3934dd3f13f9047abdebda9ca227f52ec8a2d8ab6" Oct 01 15:22:04 crc kubenswrapper[4869]: E1001 15:22:04.129391 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6708f67a419d26f377783bf3934dd3f13f9047abdebda9ca227f52ec8a2d8ab6\": container with ID starting with 6708f67a419d26f377783bf3934dd3f13f9047abdebda9ca227f52ec8a2d8ab6 not found: ID does not exist" containerID="6708f67a419d26f377783bf3934dd3f13f9047abdebda9ca227f52ec8a2d8ab6" Oct 01 15:22:04 crc kubenswrapper[4869]: I1001 15:22:04.129413 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6708f67a419d26f377783bf3934dd3f13f9047abdebda9ca227f52ec8a2d8ab6"} err="failed to get container status \"6708f67a419d26f377783bf3934dd3f13f9047abdebda9ca227f52ec8a2d8ab6\": rpc error: code = NotFound desc = could not find container \"6708f67a419d26f377783bf3934dd3f13f9047abdebda9ca227f52ec8a2d8ab6\": container with ID starting with 6708f67a419d26f377783bf3934dd3f13f9047abdebda9ca227f52ec8a2d8ab6 not found: ID does not exist" Oct 01 15:22:04 crc kubenswrapper[4869]: I1001 15:22:04.159175 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8c2c4d71-1c1e-4627-8e29-c0ffd9862e25-config-data" (OuterVolumeSpecName: "config-data") pod "8c2c4d71-1c1e-4627-8e29-c0ffd9862e25" (UID: "8c2c4d71-1c1e-4627-8e29-c0ffd9862e25"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:22:04 crc kubenswrapper[4869]: I1001 15:22:04.164484 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8c2c4d71-1c1e-4627-8e29-c0ffd9862e25-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8c2c4d71-1c1e-4627-8e29-c0ffd9862e25" (UID: "8c2c4d71-1c1e-4627-8e29-c0ffd9862e25"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:22:04 crc kubenswrapper[4869]: I1001 15:22:04.216193 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5qbrn\" (UniqueName: \"kubernetes.io/projected/8c2c4d71-1c1e-4627-8e29-c0ffd9862e25-kube-api-access-5qbrn\") on node \"crc\" DevicePath \"\"" Oct 01 15:22:04 crc kubenswrapper[4869]: I1001 15:22:04.216222 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c2c4d71-1c1e-4627-8e29-c0ffd9862e25-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 15:22:04 crc kubenswrapper[4869]: I1001 15:22:04.216231 4869 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8c2c4d71-1c1e-4627-8e29-c0ffd9862e25-logs\") on node \"crc\" DevicePath \"\"" Oct 01 15:22:04 crc kubenswrapper[4869]: I1001 15:22:04.216239 4869 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8c2c4d71-1c1e-4627-8e29-c0ffd9862e25-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 15:22:04 crc kubenswrapper[4869]: I1001 15:22:04.411712 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 01 15:22:04 crc kubenswrapper[4869]: I1001 15:22:04.427233 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Oct 01 15:22:04 crc kubenswrapper[4869]: I1001 15:22:04.441475 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Oct 01 15:22:04 crc kubenswrapper[4869]: E1001 15:22:04.441944 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c2c4d71-1c1e-4627-8e29-c0ffd9862e25" containerName="nova-api-api" Oct 01 15:22:04 crc kubenswrapper[4869]: I1001 15:22:04.441967 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c2c4d71-1c1e-4627-8e29-c0ffd9862e25" containerName="nova-api-api" Oct 01 15:22:04 crc kubenswrapper[4869]: E1001 15:22:04.441993 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c2c4d71-1c1e-4627-8e29-c0ffd9862e25" containerName="nova-api-log" Oct 01 15:22:04 crc kubenswrapper[4869]: I1001 15:22:04.442002 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c2c4d71-1c1e-4627-8e29-c0ffd9862e25" containerName="nova-api-log" Oct 01 15:22:04 crc kubenswrapper[4869]: I1001 15:22:04.442229 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="8c2c4d71-1c1e-4627-8e29-c0ffd9862e25" containerName="nova-api-log" Oct 01 15:22:04 crc kubenswrapper[4869]: I1001 15:22:04.442275 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="8c2c4d71-1c1e-4627-8e29-c0ffd9862e25" containerName="nova-api-api" Oct 01 15:22:04 crc kubenswrapper[4869]: I1001 15:22:04.443453 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 01 15:22:04 crc kubenswrapper[4869]: I1001 15:22:04.449419 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 01 15:22:04 crc kubenswrapper[4869]: I1001 15:22:04.452525 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 01 15:22:04 crc kubenswrapper[4869]: I1001 15:22:04.452576 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 01 15:22:04 crc kubenswrapper[4869]: I1001 15:22:04.455098 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Oct 01 15:22:04 crc kubenswrapper[4869]: I1001 15:22:04.521609 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hdpqb\" (UniqueName: \"kubernetes.io/projected/811451db-4e74-4bfd-9916-d0036698f3f2-kube-api-access-hdpqb\") pod \"nova-api-0\" (UID: \"811451db-4e74-4bfd-9916-d0036698f3f2\") " pod="openstack/nova-api-0" Oct 01 15:22:04 crc kubenswrapper[4869]: I1001 15:22:04.521747 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/811451db-4e74-4bfd-9916-d0036698f3f2-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"811451db-4e74-4bfd-9916-d0036698f3f2\") " pod="openstack/nova-api-0" Oct 01 15:22:04 crc kubenswrapper[4869]: I1001 15:22:04.521798 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/811451db-4e74-4bfd-9916-d0036698f3f2-logs\") pod \"nova-api-0\" (UID: \"811451db-4e74-4bfd-9916-d0036698f3f2\") " pod="openstack/nova-api-0" Oct 01 15:22:04 crc kubenswrapper[4869]: I1001 15:22:04.521834 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/811451db-4e74-4bfd-9916-d0036698f3f2-config-data\") pod \"nova-api-0\" (UID: \"811451db-4e74-4bfd-9916-d0036698f3f2\") " pod="openstack/nova-api-0" Oct 01 15:22:04 crc kubenswrapper[4869]: I1001 15:22:04.623738 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hdpqb\" (UniqueName: \"kubernetes.io/projected/811451db-4e74-4bfd-9916-d0036698f3f2-kube-api-access-hdpqb\") pod \"nova-api-0\" (UID: \"811451db-4e74-4bfd-9916-d0036698f3f2\") " pod="openstack/nova-api-0" Oct 01 15:22:04 crc kubenswrapper[4869]: I1001 15:22:04.623852 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/811451db-4e74-4bfd-9916-d0036698f3f2-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"811451db-4e74-4bfd-9916-d0036698f3f2\") " pod="openstack/nova-api-0" Oct 01 15:22:04 crc kubenswrapper[4869]: I1001 15:22:04.623886 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/811451db-4e74-4bfd-9916-d0036698f3f2-logs\") pod \"nova-api-0\" (UID: \"811451db-4e74-4bfd-9916-d0036698f3f2\") " pod="openstack/nova-api-0" Oct 01 15:22:04 crc kubenswrapper[4869]: I1001 15:22:04.623918 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/811451db-4e74-4bfd-9916-d0036698f3f2-config-data\") pod \"nova-api-0\" (UID: \"811451db-4e74-4bfd-9916-d0036698f3f2\") " 
pod="openstack/nova-api-0" Oct 01 15:22:04 crc kubenswrapper[4869]: I1001 15:22:04.626339 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/811451db-4e74-4bfd-9916-d0036698f3f2-logs\") pod \"nova-api-0\" (UID: \"811451db-4e74-4bfd-9916-d0036698f3f2\") " pod="openstack/nova-api-0" Oct 01 15:22:04 crc kubenswrapper[4869]: I1001 15:22:04.629095 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/811451db-4e74-4bfd-9916-d0036698f3f2-config-data\") pod \"nova-api-0\" (UID: \"811451db-4e74-4bfd-9916-d0036698f3f2\") " pod="openstack/nova-api-0" Oct 01 15:22:04 crc kubenswrapper[4869]: I1001 15:22:04.629223 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/811451db-4e74-4bfd-9916-d0036698f3f2-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"811451db-4e74-4bfd-9916-d0036698f3f2\") " pod="openstack/nova-api-0" Oct 01 15:22:04 crc kubenswrapper[4869]: I1001 15:22:04.643096 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hdpqb\" (UniqueName: \"kubernetes.io/projected/811451db-4e74-4bfd-9916-d0036698f3f2-kube-api-access-hdpqb\") pod \"nova-api-0\" (UID: \"811451db-4e74-4bfd-9916-d0036698f3f2\") " pod="openstack/nova-api-0" Oct 01 15:22:04 crc kubenswrapper[4869]: I1001 15:22:04.771203 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 01 15:22:05 crc kubenswrapper[4869]: I1001 15:22:05.086285 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cd73cdc8-d865-4b29-842c-5f11fe20cf66","Type":"ContainerStarted","Data":"16a8485a46792e4bcdc916eaacd8b874f9b1ec5de4bdc2f078de96d7dbe1784b"} Oct 01 15:22:05 crc kubenswrapper[4869]: I1001 15:22:05.088103 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"0c777f8b-6eec-48f7-97a6-137936bfb76c","Type":"ContainerStarted","Data":"80234bd86f4f625f7e0c5d77c5f34b4e2bb1be316c4d95d9b98c256b6f845c54"} Oct 01 15:22:05 crc kubenswrapper[4869]: I1001 15:22:05.104818 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.104803137 podStartE2EDuration="2.104803137s" podCreationTimestamp="2025-10-01 15:22:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:22:05.101385191 +0000 UTC m=+1034.248228317" watchObservedRunningTime="2025-10-01 15:22:05.104803137 +0000 UTC m=+1034.251646253" Oct 01 15:22:05 crc kubenswrapper[4869]: I1001 15:22:05.267077 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 01 15:22:05 crc kubenswrapper[4869]: I1001 15:22:05.591358 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8c2c4d71-1c1e-4627-8e29-c0ffd9862e25" path="/var/lib/kubelet/pods/8c2c4d71-1c1e-4627-8e29-c0ffd9862e25/volumes" Oct 01 15:22:06 crc kubenswrapper[4869]: I1001 15:22:06.100921 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"811451db-4e74-4bfd-9916-d0036698f3f2","Type":"ContainerStarted","Data":"7a16dd3de0b603e0345f570343ba44c58e96ebf2d9eb8e02fb3cba044bec2028"} Oct 01 15:22:06 crc kubenswrapper[4869]: I1001 15:22:06.100974 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/nova-api-0" event={"ID":"811451db-4e74-4bfd-9916-d0036698f3f2","Type":"ContainerStarted","Data":"12e54c9d8996869e9ae82636fe4218199f1f6691a870e6af921e866f18da713b"} Oct 01 15:22:06 crc kubenswrapper[4869]: I1001 15:22:06.100993 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"811451db-4e74-4bfd-9916-d0036698f3f2","Type":"ContainerStarted","Data":"6ce98187e4ddd2379fbbad88e1022b5061a9822b114e843a54d9029a91ef6d5a"} Oct 01 15:22:06 crc kubenswrapper[4869]: I1001 15:22:06.105511 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cd73cdc8-d865-4b29-842c-5f11fe20cf66","Type":"ContainerStarted","Data":"6bf79e6b6b2682c8c55566d6f3cc0a2335da3ffa9c4d8972fd89a7cdafbd5248"} Oct 01 15:22:06 crc kubenswrapper[4869]: I1001 15:22:06.136007 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.135988242 podStartE2EDuration="2.135988242s" podCreationTimestamp="2025-10-01 15:22:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:22:06.130313239 +0000 UTC m=+1035.277156395" watchObservedRunningTime="2025-10-01 15:22:06.135988242 +0000 UTC m=+1035.282831368" Oct 01 15:22:08 crc kubenswrapper[4869]: I1001 15:22:08.145941 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cd73cdc8-d865-4b29-842c-5f11fe20cf66","Type":"ContainerStarted","Data":"bee10b89aa05bd1d960a5a8d7f9bad58571afbc6b572b634f917755f2ad3e8d8"} Oct 01 15:22:08 crc kubenswrapper[4869]: I1001 15:22:08.146962 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 01 15:22:08 crc kubenswrapper[4869]: I1001 15:22:08.196716 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.7508502350000001 podStartE2EDuration="6.19668959s" podCreationTimestamp="2025-10-01 15:22:02 +0000 UTC" firstStartedPulling="2025-10-01 15:22:02.819764194 +0000 UTC m=+1031.966607300" lastFinishedPulling="2025-10-01 15:22:07.265603529 +0000 UTC m=+1036.412446655" observedRunningTime="2025-10-01 15:22:08.186600996 +0000 UTC m=+1037.333444172" watchObservedRunningTime="2025-10-01 15:22:08.19668959 +0000 UTC m=+1037.343532736" Oct 01 15:22:08 crc kubenswrapper[4869]: I1001 15:22:08.381020 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Oct 01 15:22:08 crc kubenswrapper[4869]: I1001 15:22:08.442299 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Oct 01 15:22:08 crc kubenswrapper[4869]: I1001 15:22:08.546459 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Oct 01 15:22:09 crc kubenswrapper[4869]: I1001 15:22:09.451347 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Oct 01 15:22:09 crc kubenswrapper[4869]: I1001 15:22:09.451629 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Oct 01 15:22:10 crc kubenswrapper[4869]: I1001 15:22:10.467498 4869 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="38137ffc-97e2-4517-ac14-42bfc87df875" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.182:8775/\": 
context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 01 15:22:10 crc kubenswrapper[4869]: I1001 15:22:10.467512 4869 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="38137ffc-97e2-4517-ac14-42bfc87df875" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.182:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 01 15:22:13 crc kubenswrapper[4869]: I1001 15:22:13.354409 4869 patch_prober.go:28] interesting pod/machine-config-daemon-c86m8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 15:22:13 crc kubenswrapper[4869]: I1001 15:22:13.354791 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 15:22:13 crc kubenswrapper[4869]: I1001 15:22:13.442734 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Oct 01 15:22:13 crc kubenswrapper[4869]: I1001 15:22:13.481681 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Oct 01 15:22:14 crc kubenswrapper[4869]: I1001 15:22:14.234476 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Oct 01 15:22:14 crc kubenswrapper[4869]: I1001 15:22:14.772191 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 01 15:22:14 crc kubenswrapper[4869]: I1001 15:22:14.772287 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 01 15:22:15 crc kubenswrapper[4869]: I1001 15:22:15.813588 4869 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="811451db-4e74-4bfd-9916-d0036698f3f2" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.185:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 01 15:22:15 crc kubenswrapper[4869]: I1001 15:22:15.854570 4869 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="811451db-4e74-4bfd-9916-d0036698f3f2" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.185:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 01 15:22:19 crc kubenswrapper[4869]: I1001 15:22:19.461864 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Oct 01 15:22:19 crc kubenswrapper[4869]: I1001 15:22:19.462618 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Oct 01 15:22:19 crc kubenswrapper[4869]: I1001 15:22:19.467909 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Oct 01 15:22:19 crc kubenswrapper[4869]: I1001 15:22:19.474507 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Oct 01 15:22:22 crc kubenswrapper[4869]: I1001 15:22:22.212911 4869 util.go:48] "No ready sandbox for pod can 
be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 01 15:22:22 crc kubenswrapper[4869]: I1001 15:22:22.267430 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f6cc53d-ee0e-480c-9a9c-47085a16bf8e-combined-ca-bundle\") pod \"5f6cc53d-ee0e-480c-9a9c-47085a16bf8e\" (UID: \"5f6cc53d-ee0e-480c-9a9c-47085a16bf8e\") " Oct 01 15:22:22 crc kubenswrapper[4869]: I1001 15:22:22.267848 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5f6cc53d-ee0e-480c-9a9c-47085a16bf8e-config-data\") pod \"5f6cc53d-ee0e-480c-9a9c-47085a16bf8e\" (UID: \"5f6cc53d-ee0e-480c-9a9c-47085a16bf8e\") " Oct 01 15:22:22 crc kubenswrapper[4869]: I1001 15:22:22.268104 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pr5lh\" (UniqueName: \"kubernetes.io/projected/5f6cc53d-ee0e-480c-9a9c-47085a16bf8e-kube-api-access-pr5lh\") pod \"5f6cc53d-ee0e-480c-9a9c-47085a16bf8e\" (UID: \"5f6cc53d-ee0e-480c-9a9c-47085a16bf8e\") " Oct 01 15:22:22 crc kubenswrapper[4869]: I1001 15:22:22.274503 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5f6cc53d-ee0e-480c-9a9c-47085a16bf8e-kube-api-access-pr5lh" (OuterVolumeSpecName: "kube-api-access-pr5lh") pod "5f6cc53d-ee0e-480c-9a9c-47085a16bf8e" (UID: "5f6cc53d-ee0e-480c-9a9c-47085a16bf8e"). InnerVolumeSpecName "kube-api-access-pr5lh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:22:22 crc kubenswrapper[4869]: I1001 15:22:22.288162 4869 generic.go:334] "Generic (PLEG): container finished" podID="5f6cc53d-ee0e-480c-9a9c-47085a16bf8e" containerID="28c6e575f21e30762a2b996e6e4a30351fafe38f6788a06901e7ddcb74ed8755" exitCode=137 Oct 01 15:22:22 crc kubenswrapper[4869]: I1001 15:22:22.288316 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"5f6cc53d-ee0e-480c-9a9c-47085a16bf8e","Type":"ContainerDied","Data":"28c6e575f21e30762a2b996e6e4a30351fafe38f6788a06901e7ddcb74ed8755"} Oct 01 15:22:22 crc kubenswrapper[4869]: I1001 15:22:22.288400 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"5f6cc53d-ee0e-480c-9a9c-47085a16bf8e","Type":"ContainerDied","Data":"31f1755565263eaa430b3fc420bff95259bfc07b2f4c63b1fc06a81b226a7a23"} Oct 01 15:22:22 crc kubenswrapper[4869]: I1001 15:22:22.288429 4869 scope.go:117] "RemoveContainer" containerID="28c6e575f21e30762a2b996e6e4a30351fafe38f6788a06901e7ddcb74ed8755" Oct 01 15:22:22 crc kubenswrapper[4869]: I1001 15:22:22.288616 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 01 15:22:22 crc kubenswrapper[4869]: I1001 15:22:22.295825 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5f6cc53d-ee0e-480c-9a9c-47085a16bf8e-config-data" (OuterVolumeSpecName: "config-data") pod "5f6cc53d-ee0e-480c-9a9c-47085a16bf8e" (UID: "5f6cc53d-ee0e-480c-9a9c-47085a16bf8e"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:22:22 crc kubenswrapper[4869]: I1001 15:22:22.297858 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5f6cc53d-ee0e-480c-9a9c-47085a16bf8e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5f6cc53d-ee0e-480c-9a9c-47085a16bf8e" (UID: "5f6cc53d-ee0e-480c-9a9c-47085a16bf8e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:22:22 crc kubenswrapper[4869]: I1001 15:22:22.373473 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pr5lh\" (UniqueName: \"kubernetes.io/projected/5f6cc53d-ee0e-480c-9a9c-47085a16bf8e-kube-api-access-pr5lh\") on node \"crc\" DevicePath \"\"" Oct 01 15:22:22 crc kubenswrapper[4869]: I1001 15:22:22.373532 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f6cc53d-ee0e-480c-9a9c-47085a16bf8e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 15:22:22 crc kubenswrapper[4869]: I1001 15:22:22.373551 4869 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5f6cc53d-ee0e-480c-9a9c-47085a16bf8e-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 15:22:22 crc kubenswrapper[4869]: I1001 15:22:22.380434 4869 scope.go:117] "RemoveContainer" containerID="28c6e575f21e30762a2b996e6e4a30351fafe38f6788a06901e7ddcb74ed8755" Oct 01 15:22:22 crc kubenswrapper[4869]: E1001 15:22:22.380879 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"28c6e575f21e30762a2b996e6e4a30351fafe38f6788a06901e7ddcb74ed8755\": container with ID starting with 28c6e575f21e30762a2b996e6e4a30351fafe38f6788a06901e7ddcb74ed8755 not found: ID does not exist" containerID="28c6e575f21e30762a2b996e6e4a30351fafe38f6788a06901e7ddcb74ed8755" Oct 01 15:22:22 crc kubenswrapper[4869]: I1001 15:22:22.380953 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"28c6e575f21e30762a2b996e6e4a30351fafe38f6788a06901e7ddcb74ed8755"} err="failed to get container status \"28c6e575f21e30762a2b996e6e4a30351fafe38f6788a06901e7ddcb74ed8755\": rpc error: code = NotFound desc = could not find container \"28c6e575f21e30762a2b996e6e4a30351fafe38f6788a06901e7ddcb74ed8755\": container with ID starting with 28c6e575f21e30762a2b996e6e4a30351fafe38f6788a06901e7ddcb74ed8755 not found: ID does not exist" Oct 01 15:22:22 crc kubenswrapper[4869]: I1001 15:22:22.647850 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 01 15:22:22 crc kubenswrapper[4869]: I1001 15:22:22.673307 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 01 15:22:22 crc kubenswrapper[4869]: I1001 15:22:22.695562 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 01 15:22:22 crc kubenswrapper[4869]: E1001 15:22:22.696195 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f6cc53d-ee0e-480c-9a9c-47085a16bf8e" containerName="nova-cell1-novncproxy-novncproxy" Oct 01 15:22:22 crc kubenswrapper[4869]: I1001 15:22:22.696224 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f6cc53d-ee0e-480c-9a9c-47085a16bf8e" containerName="nova-cell1-novncproxy-novncproxy" Oct 01 15:22:22 crc kubenswrapper[4869]: I1001 15:22:22.696589 4869 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="5f6cc53d-ee0e-480c-9a9c-47085a16bf8e" containerName="nova-cell1-novncproxy-novncproxy" Oct 01 15:22:22 crc kubenswrapper[4869]: I1001 15:22:22.697653 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 01 15:22:22 crc kubenswrapper[4869]: I1001 15:22:22.702713 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-public-svc" Oct 01 15:22:22 crc kubenswrapper[4869]: I1001 15:22:22.703007 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Oct 01 15:22:22 crc kubenswrapper[4869]: I1001 15:22:22.704280 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-vencrypt" Oct 01 15:22:22 crc kubenswrapper[4869]: I1001 15:22:22.708702 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 01 15:22:22 crc kubenswrapper[4869]: I1001 15:22:22.780965 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5kqc2\" (UniqueName: \"kubernetes.io/projected/c57e77ab-553d-4e64-b104-0a3c434d680b-kube-api-access-5kqc2\") pod \"nova-cell1-novncproxy-0\" (UID: \"c57e77ab-553d-4e64-b104-0a3c434d680b\") " pod="openstack/nova-cell1-novncproxy-0" Oct 01 15:22:22 crc kubenswrapper[4869]: I1001 15:22:22.781344 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/c57e77ab-553d-4e64-b104-0a3c434d680b-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"c57e77ab-553d-4e64-b104-0a3c434d680b\") " pod="openstack/nova-cell1-novncproxy-0" Oct 01 15:22:22 crc kubenswrapper[4869]: I1001 15:22:22.781622 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c57e77ab-553d-4e64-b104-0a3c434d680b-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"c57e77ab-553d-4e64-b104-0a3c434d680b\") " pod="openstack/nova-cell1-novncproxy-0" Oct 01 15:22:22 crc kubenswrapper[4869]: I1001 15:22:22.781679 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/c57e77ab-553d-4e64-b104-0a3c434d680b-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"c57e77ab-553d-4e64-b104-0a3c434d680b\") " pod="openstack/nova-cell1-novncproxy-0" Oct 01 15:22:22 crc kubenswrapper[4869]: I1001 15:22:22.781745 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c57e77ab-553d-4e64-b104-0a3c434d680b-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"c57e77ab-553d-4e64-b104-0a3c434d680b\") " pod="openstack/nova-cell1-novncproxy-0" Oct 01 15:22:22 crc kubenswrapper[4869]: I1001 15:22:22.884009 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/c57e77ab-553d-4e64-b104-0a3c434d680b-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"c57e77ab-553d-4e64-b104-0a3c434d680b\") " pod="openstack/nova-cell1-novncproxy-0" Oct 01 15:22:22 crc kubenswrapper[4869]: I1001 15:22:22.884107 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c57e77ab-553d-4e64-b104-0a3c434d680b-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"c57e77ab-553d-4e64-b104-0a3c434d680b\") " pod="openstack/nova-cell1-novncproxy-0" Oct 01 15:22:22 crc kubenswrapper[4869]: I1001 15:22:22.884137 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/c57e77ab-553d-4e64-b104-0a3c434d680b-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"c57e77ab-553d-4e64-b104-0a3c434d680b\") " pod="openstack/nova-cell1-novncproxy-0" Oct 01 15:22:22 crc kubenswrapper[4869]: I1001 15:22:22.884179 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c57e77ab-553d-4e64-b104-0a3c434d680b-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"c57e77ab-553d-4e64-b104-0a3c434d680b\") " pod="openstack/nova-cell1-novncproxy-0" Oct 01 15:22:22 crc kubenswrapper[4869]: I1001 15:22:22.884236 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5kqc2\" (UniqueName: \"kubernetes.io/projected/c57e77ab-553d-4e64-b104-0a3c434d680b-kube-api-access-5kqc2\") pod \"nova-cell1-novncproxy-0\" (UID: \"c57e77ab-553d-4e64-b104-0a3c434d680b\") " pod="openstack/nova-cell1-novncproxy-0" Oct 01 15:22:22 crc kubenswrapper[4869]: I1001 15:22:22.889817 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c57e77ab-553d-4e64-b104-0a3c434d680b-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"c57e77ab-553d-4e64-b104-0a3c434d680b\") " pod="openstack/nova-cell1-novncproxy-0" Oct 01 15:22:22 crc kubenswrapper[4869]: I1001 15:22:22.890084 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/c57e77ab-553d-4e64-b104-0a3c434d680b-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"c57e77ab-553d-4e64-b104-0a3c434d680b\") " pod="openstack/nova-cell1-novncproxy-0" Oct 01 15:22:22 crc kubenswrapper[4869]: I1001 15:22:22.890954 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c57e77ab-553d-4e64-b104-0a3c434d680b-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"c57e77ab-553d-4e64-b104-0a3c434d680b\") " pod="openstack/nova-cell1-novncproxy-0" Oct 01 15:22:22 crc kubenswrapper[4869]: I1001 15:22:22.897133 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/c57e77ab-553d-4e64-b104-0a3c434d680b-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"c57e77ab-553d-4e64-b104-0a3c434d680b\") " pod="openstack/nova-cell1-novncproxy-0" Oct 01 15:22:22 crc kubenswrapper[4869]: I1001 15:22:22.909816 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5kqc2\" (UniqueName: \"kubernetes.io/projected/c57e77ab-553d-4e64-b104-0a3c434d680b-kube-api-access-5kqc2\") pod \"nova-cell1-novncproxy-0\" (UID: \"c57e77ab-553d-4e64-b104-0a3c434d680b\") " pod="openstack/nova-cell1-novncproxy-0" Oct 01 15:22:23 crc kubenswrapper[4869]: I1001 15:22:23.035775 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 01 15:22:23 crc kubenswrapper[4869]: I1001 15:22:23.561670 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 01 15:22:23 crc kubenswrapper[4869]: W1001 15:22:23.568563 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc57e77ab_553d_4e64_b104_0a3c434d680b.slice/crio-35f692c71fad0aba3f56be7b78c61caefccaa549106346d6f27a232b1a28b745 WatchSource:0}: Error finding container 35f692c71fad0aba3f56be7b78c61caefccaa549106346d6f27a232b1a28b745: Status 404 returned error can't find the container with id 35f692c71fad0aba3f56be7b78c61caefccaa549106346d6f27a232b1a28b745 Oct 01 15:22:23 crc kubenswrapper[4869]: I1001 15:22:23.590842 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5f6cc53d-ee0e-480c-9a9c-47085a16bf8e" path="/var/lib/kubelet/pods/5f6cc53d-ee0e-480c-9a9c-47085a16bf8e/volumes" Oct 01 15:22:24 crc kubenswrapper[4869]: I1001 15:22:24.314509 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"c57e77ab-553d-4e64-b104-0a3c434d680b","Type":"ContainerStarted","Data":"78ed2e10b324bebd19532e9e1e5098e2e8e2704f6edffbc9a5f8b60c307f2728"} Oct 01 15:22:24 crc kubenswrapper[4869]: I1001 15:22:24.314550 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"c57e77ab-553d-4e64-b104-0a3c434d680b","Type":"ContainerStarted","Data":"35f692c71fad0aba3f56be7b78c61caefccaa549106346d6f27a232b1a28b745"} Oct 01 15:22:24 crc kubenswrapper[4869]: I1001 15:22:24.352569 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.352550705 podStartE2EDuration="2.352550705s" podCreationTimestamp="2025-10-01 15:22:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:22:24.337397332 +0000 UTC m=+1053.484240468" watchObservedRunningTime="2025-10-01 15:22:24.352550705 +0000 UTC m=+1053.499393821" Oct 01 15:22:24 crc kubenswrapper[4869]: I1001 15:22:24.777307 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Oct 01 15:22:24 crc kubenswrapper[4869]: I1001 15:22:24.779206 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Oct 01 15:22:24 crc kubenswrapper[4869]: I1001 15:22:24.779719 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Oct 01 15:22:24 crc kubenswrapper[4869]: I1001 15:22:24.779915 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Oct 01 15:22:24 crc kubenswrapper[4869]: I1001 15:22:24.785609 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Oct 01 15:22:24 crc kubenswrapper[4869]: I1001 15:22:24.798313 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Oct 01 15:22:25 crc kubenswrapper[4869]: I1001 15:22:25.030967 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6fcdb98747-4stnn"] Oct 01 15:22:25 crc kubenswrapper[4869]: I1001 15:22:25.032712 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6fcdb98747-4stnn" Oct 01 15:22:25 crc kubenswrapper[4869]: I1001 15:22:25.069716 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6fcdb98747-4stnn"] Oct 01 15:22:25 crc kubenswrapper[4869]: I1001 15:22:25.229796 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/277a7a86-40f9-47c0-9e4c-ec4193086c16-ovsdbserver-sb\") pod \"dnsmasq-dns-6fcdb98747-4stnn\" (UID: \"277a7a86-40f9-47c0-9e4c-ec4193086c16\") " pod="openstack/dnsmasq-dns-6fcdb98747-4stnn" Oct 01 15:22:25 crc kubenswrapper[4869]: I1001 15:22:25.230034 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/277a7a86-40f9-47c0-9e4c-ec4193086c16-config\") pod \"dnsmasq-dns-6fcdb98747-4stnn\" (UID: \"277a7a86-40f9-47c0-9e4c-ec4193086c16\") " pod="openstack/dnsmasq-dns-6fcdb98747-4stnn" Oct 01 15:22:25 crc kubenswrapper[4869]: I1001 15:22:25.230180 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-96g85\" (UniqueName: \"kubernetes.io/projected/277a7a86-40f9-47c0-9e4c-ec4193086c16-kube-api-access-96g85\") pod \"dnsmasq-dns-6fcdb98747-4stnn\" (UID: \"277a7a86-40f9-47c0-9e4c-ec4193086c16\") " pod="openstack/dnsmasq-dns-6fcdb98747-4stnn" Oct 01 15:22:25 crc kubenswrapper[4869]: I1001 15:22:25.230355 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/277a7a86-40f9-47c0-9e4c-ec4193086c16-ovsdbserver-nb\") pod \"dnsmasq-dns-6fcdb98747-4stnn\" (UID: \"277a7a86-40f9-47c0-9e4c-ec4193086c16\") " pod="openstack/dnsmasq-dns-6fcdb98747-4stnn" Oct 01 15:22:25 crc kubenswrapper[4869]: I1001 15:22:25.230402 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/277a7a86-40f9-47c0-9e4c-ec4193086c16-dns-svc\") pod \"dnsmasq-dns-6fcdb98747-4stnn\" (UID: \"277a7a86-40f9-47c0-9e4c-ec4193086c16\") " pod="openstack/dnsmasq-dns-6fcdb98747-4stnn" Oct 01 15:22:25 crc kubenswrapper[4869]: I1001 15:22:25.331743 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/277a7a86-40f9-47c0-9e4c-ec4193086c16-config\") pod \"dnsmasq-dns-6fcdb98747-4stnn\" (UID: \"277a7a86-40f9-47c0-9e4c-ec4193086c16\") " pod="openstack/dnsmasq-dns-6fcdb98747-4stnn" Oct 01 15:22:25 crc kubenswrapper[4869]: I1001 15:22:25.331811 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-96g85\" (UniqueName: \"kubernetes.io/projected/277a7a86-40f9-47c0-9e4c-ec4193086c16-kube-api-access-96g85\") pod \"dnsmasq-dns-6fcdb98747-4stnn\" (UID: \"277a7a86-40f9-47c0-9e4c-ec4193086c16\") " pod="openstack/dnsmasq-dns-6fcdb98747-4stnn" Oct 01 15:22:25 crc kubenswrapper[4869]: I1001 15:22:25.331859 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/277a7a86-40f9-47c0-9e4c-ec4193086c16-ovsdbserver-nb\") pod \"dnsmasq-dns-6fcdb98747-4stnn\" (UID: \"277a7a86-40f9-47c0-9e4c-ec4193086c16\") " pod="openstack/dnsmasq-dns-6fcdb98747-4stnn" Oct 01 15:22:25 crc kubenswrapper[4869]: I1001 15:22:25.331883 4869 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/277a7a86-40f9-47c0-9e4c-ec4193086c16-dns-svc\") pod \"dnsmasq-dns-6fcdb98747-4stnn\" (UID: \"277a7a86-40f9-47c0-9e4c-ec4193086c16\") " pod="openstack/dnsmasq-dns-6fcdb98747-4stnn" Oct 01 15:22:25 crc kubenswrapper[4869]: I1001 15:22:25.331918 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/277a7a86-40f9-47c0-9e4c-ec4193086c16-ovsdbserver-sb\") pod \"dnsmasq-dns-6fcdb98747-4stnn\" (UID: \"277a7a86-40f9-47c0-9e4c-ec4193086c16\") " pod="openstack/dnsmasq-dns-6fcdb98747-4stnn" Oct 01 15:22:25 crc kubenswrapper[4869]: I1001 15:22:25.333174 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/277a7a86-40f9-47c0-9e4c-ec4193086c16-ovsdbserver-sb\") pod \"dnsmasq-dns-6fcdb98747-4stnn\" (UID: \"277a7a86-40f9-47c0-9e4c-ec4193086c16\") " pod="openstack/dnsmasq-dns-6fcdb98747-4stnn" Oct 01 15:22:25 crc kubenswrapper[4869]: I1001 15:22:25.333730 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/277a7a86-40f9-47c0-9e4c-ec4193086c16-config\") pod \"dnsmasq-dns-6fcdb98747-4stnn\" (UID: \"277a7a86-40f9-47c0-9e4c-ec4193086c16\") " pod="openstack/dnsmasq-dns-6fcdb98747-4stnn" Oct 01 15:22:25 crc kubenswrapper[4869]: I1001 15:22:25.335055 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/277a7a86-40f9-47c0-9e4c-ec4193086c16-dns-svc\") pod \"dnsmasq-dns-6fcdb98747-4stnn\" (UID: \"277a7a86-40f9-47c0-9e4c-ec4193086c16\") " pod="openstack/dnsmasq-dns-6fcdb98747-4stnn" Oct 01 15:22:25 crc kubenswrapper[4869]: I1001 15:22:25.335726 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/277a7a86-40f9-47c0-9e4c-ec4193086c16-ovsdbserver-nb\") pod \"dnsmasq-dns-6fcdb98747-4stnn\" (UID: \"277a7a86-40f9-47c0-9e4c-ec4193086c16\") " pod="openstack/dnsmasq-dns-6fcdb98747-4stnn" Oct 01 15:22:25 crc kubenswrapper[4869]: I1001 15:22:25.359773 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-96g85\" (UniqueName: \"kubernetes.io/projected/277a7a86-40f9-47c0-9e4c-ec4193086c16-kube-api-access-96g85\") pod \"dnsmasq-dns-6fcdb98747-4stnn\" (UID: \"277a7a86-40f9-47c0-9e4c-ec4193086c16\") " pod="openstack/dnsmasq-dns-6fcdb98747-4stnn" Oct 01 15:22:25 crc kubenswrapper[4869]: I1001 15:22:25.381823 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6fcdb98747-4stnn" Oct 01 15:22:25 crc kubenswrapper[4869]: I1001 15:22:25.880013 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6fcdb98747-4stnn"] Oct 01 15:22:25 crc kubenswrapper[4869]: W1001 15:22:25.883920 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod277a7a86_40f9_47c0_9e4c_ec4193086c16.slice/crio-3142a136fbf6f4ab7d68df31aa779c0863f02faac64123d6b3237a05e009f513 WatchSource:0}: Error finding container 3142a136fbf6f4ab7d68df31aa779c0863f02faac64123d6b3237a05e009f513: Status 404 returned error can't find the container with id 3142a136fbf6f4ab7d68df31aa779c0863f02faac64123d6b3237a05e009f513 Oct 01 15:22:26 crc kubenswrapper[4869]: I1001 15:22:26.330694 4869 generic.go:334] "Generic (PLEG): container finished" podID="277a7a86-40f9-47c0-9e4c-ec4193086c16" containerID="f6938aa5d14f7cdf56fb6aef8d272222cf0f1d583b1e47fad583a837789368a3" exitCode=0 Oct 01 15:22:26 crc kubenswrapper[4869]: I1001 15:22:26.330897 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6fcdb98747-4stnn" event={"ID":"277a7a86-40f9-47c0-9e4c-ec4193086c16","Type":"ContainerDied","Data":"f6938aa5d14f7cdf56fb6aef8d272222cf0f1d583b1e47fad583a837789368a3"} Oct 01 15:22:26 crc kubenswrapper[4869]: I1001 15:22:26.331165 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6fcdb98747-4stnn" event={"ID":"277a7a86-40f9-47c0-9e4c-ec4193086c16","Type":"ContainerStarted","Data":"3142a136fbf6f4ab7d68df31aa779c0863f02faac64123d6b3237a05e009f513"} Oct 01 15:22:27 crc kubenswrapper[4869]: I1001 15:22:27.154769 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 01 15:22:27 crc kubenswrapper[4869]: I1001 15:22:27.155599 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="cd73cdc8-d865-4b29-842c-5f11fe20cf66" containerName="ceilometer-central-agent" containerID="cri-o://0477058267af225a9a2b9982eba75a9c4c755894f86330d92a8d2df0993cd5a4" gracePeriod=30 Oct 01 15:22:27 crc kubenswrapper[4869]: I1001 15:22:27.155726 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="cd73cdc8-d865-4b29-842c-5f11fe20cf66" containerName="proxy-httpd" containerID="cri-o://bee10b89aa05bd1d960a5a8d7f9bad58571afbc6b572b634f917755f2ad3e8d8" gracePeriod=30 Oct 01 15:22:27 crc kubenswrapper[4869]: I1001 15:22:27.155761 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="cd73cdc8-d865-4b29-842c-5f11fe20cf66" containerName="sg-core" containerID="cri-o://6bf79e6b6b2682c8c55566d6f3cc0a2335da3ffa9c4d8972fd89a7cdafbd5248" gracePeriod=30 Oct 01 15:22:27 crc kubenswrapper[4869]: I1001 15:22:27.155791 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="cd73cdc8-d865-4b29-842c-5f11fe20cf66" containerName="ceilometer-notification-agent" containerID="cri-o://16a8485a46792e4bcdc916eaacd8b874f9b1ec5de4bdc2f078de96d7dbe1784b" gracePeriod=30 Oct 01 15:22:27 crc kubenswrapper[4869]: I1001 15:22:27.174940 4869 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="cd73cdc8-d865-4b29-842c-5f11fe20cf66" containerName="proxy-httpd" probeResult="failure" output="Get \"https://10.217.0.183:3000/\": read tcp 10.217.0.2:41972->10.217.0.183:3000: read: 
connection reset by peer" Oct 01 15:22:27 crc kubenswrapper[4869]: I1001 15:22:27.347700 4869 generic.go:334] "Generic (PLEG): container finished" podID="cd73cdc8-d865-4b29-842c-5f11fe20cf66" containerID="6bf79e6b6b2682c8c55566d6f3cc0a2335da3ffa9c4d8972fd89a7cdafbd5248" exitCode=2 Oct 01 15:22:27 crc kubenswrapper[4869]: I1001 15:22:27.347967 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cd73cdc8-d865-4b29-842c-5f11fe20cf66","Type":"ContainerDied","Data":"6bf79e6b6b2682c8c55566d6f3cc0a2335da3ffa9c4d8972fd89a7cdafbd5248"} Oct 01 15:22:27 crc kubenswrapper[4869]: I1001 15:22:27.350607 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6fcdb98747-4stnn" event={"ID":"277a7a86-40f9-47c0-9e4c-ec4193086c16","Type":"ContainerStarted","Data":"1a3f91626d055dedf37348ce45e16bf1c2049c8fe2fac5e3992abc3ca716c955"} Oct 01 15:22:27 crc kubenswrapper[4869]: I1001 15:22:27.350665 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6fcdb98747-4stnn" Oct 01 15:22:27 crc kubenswrapper[4869]: I1001 15:22:27.372416 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6fcdb98747-4stnn" podStartSLOduration=3.372395302 podStartE2EDuration="3.372395302s" podCreationTimestamp="2025-10-01 15:22:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:22:27.369656443 +0000 UTC m=+1056.516499559" watchObservedRunningTime="2025-10-01 15:22:27.372395302 +0000 UTC m=+1056.519238438" Oct 01 15:22:27 crc kubenswrapper[4869]: I1001 15:22:27.500552 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 01 15:22:27 crc kubenswrapper[4869]: I1001 15:22:27.500756 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="811451db-4e74-4bfd-9916-d0036698f3f2" containerName="nova-api-log" containerID="cri-o://12e54c9d8996869e9ae82636fe4218199f1f6691a870e6af921e866f18da713b" gracePeriod=30 Oct 01 15:22:27 crc kubenswrapper[4869]: I1001 15:22:27.500884 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="811451db-4e74-4bfd-9916-d0036698f3f2" containerName="nova-api-api" containerID="cri-o://7a16dd3de0b603e0345f570343ba44c58e96ebf2d9eb8e02fb3cba044bec2028" gracePeriod=30 Oct 01 15:22:28 crc kubenswrapper[4869]: I1001 15:22:28.036767 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Oct 01 15:22:28 crc kubenswrapper[4869]: I1001 15:22:28.362516 4869 generic.go:334] "Generic (PLEG): container finished" podID="811451db-4e74-4bfd-9916-d0036698f3f2" containerID="12e54c9d8996869e9ae82636fe4218199f1f6691a870e6af921e866f18da713b" exitCode=143 Oct 01 15:22:28 crc kubenswrapper[4869]: I1001 15:22:28.362580 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"811451db-4e74-4bfd-9916-d0036698f3f2","Type":"ContainerDied","Data":"12e54c9d8996869e9ae82636fe4218199f1f6691a870e6af921e866f18da713b"} Oct 01 15:22:28 crc kubenswrapper[4869]: I1001 15:22:28.365896 4869 generic.go:334] "Generic (PLEG): container finished" podID="cd73cdc8-d865-4b29-842c-5f11fe20cf66" containerID="bee10b89aa05bd1d960a5a8d7f9bad58571afbc6b572b634f917755f2ad3e8d8" exitCode=0 Oct 01 15:22:28 crc kubenswrapper[4869]: I1001 15:22:28.365924 4869 generic.go:334] "Generic (PLEG): 
container finished" podID="cd73cdc8-d865-4b29-842c-5f11fe20cf66" containerID="0477058267af225a9a2b9982eba75a9c4c755894f86330d92a8d2df0993cd5a4" exitCode=0 Oct 01 15:22:28 crc kubenswrapper[4869]: I1001 15:22:28.365997 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cd73cdc8-d865-4b29-842c-5f11fe20cf66","Type":"ContainerDied","Data":"bee10b89aa05bd1d960a5a8d7f9bad58571afbc6b572b634f917755f2ad3e8d8"} Oct 01 15:22:28 crc kubenswrapper[4869]: I1001 15:22:28.366072 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cd73cdc8-d865-4b29-842c-5f11fe20cf66","Type":"ContainerDied","Data":"0477058267af225a9a2b9982eba75a9c4c755894f86330d92a8d2df0993cd5a4"} Oct 01 15:22:30 crc kubenswrapper[4869]: I1001 15:22:30.090685 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 01 15:22:30 crc kubenswrapper[4869]: I1001 15:22:30.218699 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/cd73cdc8-d865-4b29-842c-5f11fe20cf66-ceilometer-tls-certs\") pod \"cd73cdc8-d865-4b29-842c-5f11fe20cf66\" (UID: \"cd73cdc8-d865-4b29-842c-5f11fe20cf66\") " Oct 01 15:22:30 crc kubenswrapper[4869]: I1001 15:22:30.218805 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cd73cdc8-d865-4b29-842c-5f11fe20cf66-config-data\") pod \"cd73cdc8-d865-4b29-842c-5f11fe20cf66\" (UID: \"cd73cdc8-d865-4b29-842c-5f11fe20cf66\") " Oct 01 15:22:30 crc kubenswrapper[4869]: I1001 15:22:30.218842 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd73cdc8-d865-4b29-842c-5f11fe20cf66-combined-ca-bundle\") pod \"cd73cdc8-d865-4b29-842c-5f11fe20cf66\" (UID: \"cd73cdc8-d865-4b29-842c-5f11fe20cf66\") " Oct 01 15:22:30 crc kubenswrapper[4869]: I1001 15:22:30.218896 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/cd73cdc8-d865-4b29-842c-5f11fe20cf66-sg-core-conf-yaml\") pod \"cd73cdc8-d865-4b29-842c-5f11fe20cf66\" (UID: \"cd73cdc8-d865-4b29-842c-5f11fe20cf66\") " Oct 01 15:22:30 crc kubenswrapper[4869]: I1001 15:22:30.218956 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cd73cdc8-d865-4b29-842c-5f11fe20cf66-log-httpd\") pod \"cd73cdc8-d865-4b29-842c-5f11fe20cf66\" (UID: \"cd73cdc8-d865-4b29-842c-5f11fe20cf66\") " Oct 01 15:22:30 crc kubenswrapper[4869]: I1001 15:22:30.219006 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rq9fj\" (UniqueName: \"kubernetes.io/projected/cd73cdc8-d865-4b29-842c-5f11fe20cf66-kube-api-access-rq9fj\") pod \"cd73cdc8-d865-4b29-842c-5f11fe20cf66\" (UID: \"cd73cdc8-d865-4b29-842c-5f11fe20cf66\") " Oct 01 15:22:30 crc kubenswrapper[4869]: I1001 15:22:30.219039 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cd73cdc8-d865-4b29-842c-5f11fe20cf66-run-httpd\") pod \"cd73cdc8-d865-4b29-842c-5f11fe20cf66\" (UID: \"cd73cdc8-d865-4b29-842c-5f11fe20cf66\") " Oct 01 15:22:30 crc kubenswrapper[4869]: I1001 15:22:30.219184 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"scripts\" (UniqueName: \"kubernetes.io/secret/cd73cdc8-d865-4b29-842c-5f11fe20cf66-scripts\") pod \"cd73cdc8-d865-4b29-842c-5f11fe20cf66\" (UID: \"cd73cdc8-d865-4b29-842c-5f11fe20cf66\") " Oct 01 15:22:30 crc kubenswrapper[4869]: I1001 15:22:30.219680 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cd73cdc8-d865-4b29-842c-5f11fe20cf66-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "cd73cdc8-d865-4b29-842c-5f11fe20cf66" (UID: "cd73cdc8-d865-4b29-842c-5f11fe20cf66"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 15:22:30 crc kubenswrapper[4869]: I1001 15:22:30.220843 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cd73cdc8-d865-4b29-842c-5f11fe20cf66-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "cd73cdc8-d865-4b29-842c-5f11fe20cf66" (UID: "cd73cdc8-d865-4b29-842c-5f11fe20cf66"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 15:22:30 crc kubenswrapper[4869]: I1001 15:22:30.229872 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd73cdc8-d865-4b29-842c-5f11fe20cf66-kube-api-access-rq9fj" (OuterVolumeSpecName: "kube-api-access-rq9fj") pod "cd73cdc8-d865-4b29-842c-5f11fe20cf66" (UID: "cd73cdc8-d865-4b29-842c-5f11fe20cf66"). InnerVolumeSpecName "kube-api-access-rq9fj". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:22:30 crc kubenswrapper[4869]: I1001 15:22:30.233421 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cd73cdc8-d865-4b29-842c-5f11fe20cf66-scripts" (OuterVolumeSpecName: "scripts") pod "cd73cdc8-d865-4b29-842c-5f11fe20cf66" (UID: "cd73cdc8-d865-4b29-842c-5f11fe20cf66"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:22:30 crc kubenswrapper[4869]: I1001 15:22:30.247624 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cd73cdc8-d865-4b29-842c-5f11fe20cf66-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "cd73cdc8-d865-4b29-842c-5f11fe20cf66" (UID: "cd73cdc8-d865-4b29-842c-5f11fe20cf66"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:22:30 crc kubenswrapper[4869]: I1001 15:22:30.270093 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cd73cdc8-d865-4b29-842c-5f11fe20cf66-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "cd73cdc8-d865-4b29-842c-5f11fe20cf66" (UID: "cd73cdc8-d865-4b29-842c-5f11fe20cf66"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:22:30 crc kubenswrapper[4869]: I1001 15:22:30.320959 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cd73cdc8-d865-4b29-842c-5f11fe20cf66-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cd73cdc8-d865-4b29-842c-5f11fe20cf66" (UID: "cd73cdc8-d865-4b29-842c-5f11fe20cf66"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:22:30 crc kubenswrapper[4869]: I1001 15:22:30.321198 4869 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cd73cdc8-d865-4b29-842c-5f11fe20cf66-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 01 15:22:30 crc kubenswrapper[4869]: I1001 15:22:30.321234 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rq9fj\" (UniqueName: \"kubernetes.io/projected/cd73cdc8-d865-4b29-842c-5f11fe20cf66-kube-api-access-rq9fj\") on node \"crc\" DevicePath \"\"" Oct 01 15:22:30 crc kubenswrapper[4869]: I1001 15:22:30.321251 4869 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cd73cdc8-d865-4b29-842c-5f11fe20cf66-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 01 15:22:30 crc kubenswrapper[4869]: I1001 15:22:30.321299 4869 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cd73cdc8-d865-4b29-842c-5f11fe20cf66-scripts\") on node \"crc\" DevicePath \"\"" Oct 01 15:22:30 crc kubenswrapper[4869]: I1001 15:22:30.321311 4869 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/cd73cdc8-d865-4b29-842c-5f11fe20cf66-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 01 15:22:30 crc kubenswrapper[4869]: I1001 15:22:30.321323 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd73cdc8-d865-4b29-842c-5f11fe20cf66-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 15:22:30 crc kubenswrapper[4869]: I1001 15:22:30.321337 4869 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/cd73cdc8-d865-4b29-842c-5f11fe20cf66-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 01 15:22:30 crc kubenswrapper[4869]: I1001 15:22:30.335026 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cd73cdc8-d865-4b29-842c-5f11fe20cf66-config-data" (OuterVolumeSpecName: "config-data") pod "cd73cdc8-d865-4b29-842c-5f11fe20cf66" (UID: "cd73cdc8-d865-4b29-842c-5f11fe20cf66"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:22:30 crc kubenswrapper[4869]: I1001 15:22:30.402653 4869 generic.go:334] "Generic (PLEG): container finished" podID="cd73cdc8-d865-4b29-842c-5f11fe20cf66" containerID="16a8485a46792e4bcdc916eaacd8b874f9b1ec5de4bdc2f078de96d7dbe1784b" exitCode=0 Oct 01 15:22:30 crc kubenswrapper[4869]: I1001 15:22:30.402713 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cd73cdc8-d865-4b29-842c-5f11fe20cf66","Type":"ContainerDied","Data":"16a8485a46792e4bcdc916eaacd8b874f9b1ec5de4bdc2f078de96d7dbe1784b"} Oct 01 15:22:30 crc kubenswrapper[4869]: I1001 15:22:30.402719 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 01 15:22:30 crc kubenswrapper[4869]: I1001 15:22:30.402755 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cd73cdc8-d865-4b29-842c-5f11fe20cf66","Type":"ContainerDied","Data":"c8792b36e93f76a928a64aa9d72b5ebbec7fca541e4d032c577bc20c068b4711"} Oct 01 15:22:30 crc kubenswrapper[4869]: I1001 15:22:30.402782 4869 scope.go:117] "RemoveContainer" containerID="bee10b89aa05bd1d960a5a8d7f9bad58571afbc6b572b634f917755f2ad3e8d8" Oct 01 15:22:30 crc kubenswrapper[4869]: I1001 15:22:30.423010 4869 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cd73cdc8-d865-4b29-842c-5f11fe20cf66-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 15:22:30 crc kubenswrapper[4869]: I1001 15:22:30.424739 4869 scope.go:117] "RemoveContainer" containerID="6bf79e6b6b2682c8c55566d6f3cc0a2335da3ffa9c4d8972fd89a7cdafbd5248" Oct 01 15:22:30 crc kubenswrapper[4869]: I1001 15:22:30.446720 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 01 15:22:30 crc kubenswrapper[4869]: I1001 15:22:30.456426 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 01 15:22:30 crc kubenswrapper[4869]: I1001 15:22:30.456797 4869 scope.go:117] "RemoveContainer" containerID="16a8485a46792e4bcdc916eaacd8b874f9b1ec5de4bdc2f078de96d7dbe1784b" Oct 01 15:22:30 crc kubenswrapper[4869]: I1001 15:22:30.468285 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 01 15:22:30 crc kubenswrapper[4869]: E1001 15:22:30.468685 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd73cdc8-d865-4b29-842c-5f11fe20cf66" containerName="ceilometer-notification-agent" Oct 01 15:22:30 crc kubenswrapper[4869]: I1001 15:22:30.468705 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd73cdc8-d865-4b29-842c-5f11fe20cf66" containerName="ceilometer-notification-agent" Oct 01 15:22:30 crc kubenswrapper[4869]: E1001 15:22:30.468722 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd73cdc8-d865-4b29-842c-5f11fe20cf66" containerName="ceilometer-central-agent" Oct 01 15:22:30 crc kubenswrapper[4869]: I1001 15:22:30.468729 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd73cdc8-d865-4b29-842c-5f11fe20cf66" containerName="ceilometer-central-agent" Oct 01 15:22:30 crc kubenswrapper[4869]: E1001 15:22:30.468737 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd73cdc8-d865-4b29-842c-5f11fe20cf66" containerName="proxy-httpd" Oct 01 15:22:30 crc kubenswrapper[4869]: I1001 15:22:30.468743 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd73cdc8-d865-4b29-842c-5f11fe20cf66" containerName="proxy-httpd" Oct 01 15:22:30 crc kubenswrapper[4869]: E1001 15:22:30.468773 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd73cdc8-d865-4b29-842c-5f11fe20cf66" containerName="sg-core" Oct 01 15:22:30 crc kubenswrapper[4869]: I1001 15:22:30.468779 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd73cdc8-d865-4b29-842c-5f11fe20cf66" containerName="sg-core" Oct 01 15:22:30 crc kubenswrapper[4869]: I1001 15:22:30.473426 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="cd73cdc8-d865-4b29-842c-5f11fe20cf66" containerName="ceilometer-notification-agent" Oct 01 15:22:30 crc kubenswrapper[4869]: I1001 15:22:30.473477 4869 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="cd73cdc8-d865-4b29-842c-5f11fe20cf66" containerName="sg-core" Oct 01 15:22:30 crc kubenswrapper[4869]: I1001 15:22:30.473502 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="cd73cdc8-d865-4b29-842c-5f11fe20cf66" containerName="ceilometer-central-agent" Oct 01 15:22:30 crc kubenswrapper[4869]: I1001 15:22:30.473536 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="cd73cdc8-d865-4b29-842c-5f11fe20cf66" containerName="proxy-httpd" Oct 01 15:22:30 crc kubenswrapper[4869]: I1001 15:22:30.475341 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 01 15:22:30 crc kubenswrapper[4869]: I1001 15:22:30.478281 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 01 15:22:30 crc kubenswrapper[4869]: I1001 15:22:30.478468 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Oct 01 15:22:30 crc kubenswrapper[4869]: I1001 15:22:30.480322 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 01 15:22:30 crc kubenswrapper[4869]: I1001 15:22:30.483608 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 01 15:22:30 crc kubenswrapper[4869]: I1001 15:22:30.484653 4869 scope.go:117] "RemoveContainer" containerID="0477058267af225a9a2b9982eba75a9c4c755894f86330d92a8d2df0993cd5a4" Oct 01 15:22:30 crc kubenswrapper[4869]: I1001 15:22:30.515670 4869 scope.go:117] "RemoveContainer" containerID="bee10b89aa05bd1d960a5a8d7f9bad58571afbc6b572b634f917755f2ad3e8d8" Oct 01 15:22:30 crc kubenswrapper[4869]: E1001 15:22:30.516225 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bee10b89aa05bd1d960a5a8d7f9bad58571afbc6b572b634f917755f2ad3e8d8\": container with ID starting with bee10b89aa05bd1d960a5a8d7f9bad58571afbc6b572b634f917755f2ad3e8d8 not found: ID does not exist" containerID="bee10b89aa05bd1d960a5a8d7f9bad58571afbc6b572b634f917755f2ad3e8d8" Oct 01 15:22:30 crc kubenswrapper[4869]: I1001 15:22:30.516303 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bee10b89aa05bd1d960a5a8d7f9bad58571afbc6b572b634f917755f2ad3e8d8"} err="failed to get container status \"bee10b89aa05bd1d960a5a8d7f9bad58571afbc6b572b634f917755f2ad3e8d8\": rpc error: code = NotFound desc = could not find container \"bee10b89aa05bd1d960a5a8d7f9bad58571afbc6b572b634f917755f2ad3e8d8\": container with ID starting with bee10b89aa05bd1d960a5a8d7f9bad58571afbc6b572b634f917755f2ad3e8d8 not found: ID does not exist" Oct 01 15:22:30 crc kubenswrapper[4869]: I1001 15:22:30.516339 4869 scope.go:117] "RemoveContainer" containerID="6bf79e6b6b2682c8c55566d6f3cc0a2335da3ffa9c4d8972fd89a7cdafbd5248" Oct 01 15:22:30 crc kubenswrapper[4869]: E1001 15:22:30.516953 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6bf79e6b6b2682c8c55566d6f3cc0a2335da3ffa9c4d8972fd89a7cdafbd5248\": container with ID starting with 6bf79e6b6b2682c8c55566d6f3cc0a2335da3ffa9c4d8972fd89a7cdafbd5248 not found: ID does not exist" containerID="6bf79e6b6b2682c8c55566d6f3cc0a2335da3ffa9c4d8972fd89a7cdafbd5248" Oct 01 15:22:30 crc kubenswrapper[4869]: I1001 15:22:30.517014 4869 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"6bf79e6b6b2682c8c55566d6f3cc0a2335da3ffa9c4d8972fd89a7cdafbd5248"} err="failed to get container status \"6bf79e6b6b2682c8c55566d6f3cc0a2335da3ffa9c4d8972fd89a7cdafbd5248\": rpc error: code = NotFound desc = could not find container \"6bf79e6b6b2682c8c55566d6f3cc0a2335da3ffa9c4d8972fd89a7cdafbd5248\": container with ID starting with 6bf79e6b6b2682c8c55566d6f3cc0a2335da3ffa9c4d8972fd89a7cdafbd5248 not found: ID does not exist" Oct 01 15:22:30 crc kubenswrapper[4869]: I1001 15:22:30.517058 4869 scope.go:117] "RemoveContainer" containerID="16a8485a46792e4bcdc916eaacd8b874f9b1ec5de4bdc2f078de96d7dbe1784b" Oct 01 15:22:30 crc kubenswrapper[4869]: E1001 15:22:30.518084 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"16a8485a46792e4bcdc916eaacd8b874f9b1ec5de4bdc2f078de96d7dbe1784b\": container with ID starting with 16a8485a46792e4bcdc916eaacd8b874f9b1ec5de4bdc2f078de96d7dbe1784b not found: ID does not exist" containerID="16a8485a46792e4bcdc916eaacd8b874f9b1ec5de4bdc2f078de96d7dbe1784b" Oct 01 15:22:30 crc kubenswrapper[4869]: I1001 15:22:30.518153 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"16a8485a46792e4bcdc916eaacd8b874f9b1ec5de4bdc2f078de96d7dbe1784b"} err="failed to get container status \"16a8485a46792e4bcdc916eaacd8b874f9b1ec5de4bdc2f078de96d7dbe1784b\": rpc error: code = NotFound desc = could not find container \"16a8485a46792e4bcdc916eaacd8b874f9b1ec5de4bdc2f078de96d7dbe1784b\": container with ID starting with 16a8485a46792e4bcdc916eaacd8b874f9b1ec5de4bdc2f078de96d7dbe1784b not found: ID does not exist" Oct 01 15:22:30 crc kubenswrapper[4869]: I1001 15:22:30.518203 4869 scope.go:117] "RemoveContainer" containerID="0477058267af225a9a2b9982eba75a9c4c755894f86330d92a8d2df0993cd5a4" Oct 01 15:22:30 crc kubenswrapper[4869]: E1001 15:22:30.518736 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0477058267af225a9a2b9982eba75a9c4c755894f86330d92a8d2df0993cd5a4\": container with ID starting with 0477058267af225a9a2b9982eba75a9c4c755894f86330d92a8d2df0993cd5a4 not found: ID does not exist" containerID="0477058267af225a9a2b9982eba75a9c4c755894f86330d92a8d2df0993cd5a4" Oct 01 15:22:30 crc kubenswrapper[4869]: I1001 15:22:30.518766 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0477058267af225a9a2b9982eba75a9c4c755894f86330d92a8d2df0993cd5a4"} err="failed to get container status \"0477058267af225a9a2b9982eba75a9c4c755894f86330d92a8d2df0993cd5a4\": rpc error: code = NotFound desc = could not find container \"0477058267af225a9a2b9982eba75a9c4c755894f86330d92a8d2df0993cd5a4\": container with ID starting with 0477058267af225a9a2b9982eba75a9c4c755894f86330d92a8d2df0993cd5a4 not found: ID does not exist" Oct 01 15:22:30 crc kubenswrapper[4869]: I1001 15:22:30.524741 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/a85f4a7e-3c28-4c54-be1f-723b7bca17cb-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"a85f4a7e-3c28-4c54-be1f-723b7bca17cb\") " pod="openstack/ceilometer-0" Oct 01 15:22:30 crc kubenswrapper[4869]: I1001 15:22:30.524828 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/a85f4a7e-3c28-4c54-be1f-723b7bca17cb-config-data\") pod \"ceilometer-0\" (UID: \"a85f4a7e-3c28-4c54-be1f-723b7bca17cb\") " pod="openstack/ceilometer-0" Oct 01 15:22:30 crc kubenswrapper[4869]: I1001 15:22:30.524956 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a85f4a7e-3c28-4c54-be1f-723b7bca17cb-log-httpd\") pod \"ceilometer-0\" (UID: \"a85f4a7e-3c28-4c54-be1f-723b7bca17cb\") " pod="openstack/ceilometer-0" Oct 01 15:22:30 crc kubenswrapper[4869]: I1001 15:22:30.525035 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a85f4a7e-3c28-4c54-be1f-723b7bca17cb-scripts\") pod \"ceilometer-0\" (UID: \"a85f4a7e-3c28-4c54-be1f-723b7bca17cb\") " pod="openstack/ceilometer-0" Oct 01 15:22:30 crc kubenswrapper[4869]: I1001 15:22:30.525083 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a85f4a7e-3c28-4c54-be1f-723b7bca17cb-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a85f4a7e-3c28-4c54-be1f-723b7bca17cb\") " pod="openstack/ceilometer-0" Oct 01 15:22:30 crc kubenswrapper[4869]: I1001 15:22:30.525132 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a85f4a7e-3c28-4c54-be1f-723b7bca17cb-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a85f4a7e-3c28-4c54-be1f-723b7bca17cb\") " pod="openstack/ceilometer-0" Oct 01 15:22:30 crc kubenswrapper[4869]: I1001 15:22:30.525254 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a85f4a7e-3c28-4c54-be1f-723b7bca17cb-run-httpd\") pod \"ceilometer-0\" (UID: \"a85f4a7e-3c28-4c54-be1f-723b7bca17cb\") " pod="openstack/ceilometer-0" Oct 01 15:22:30 crc kubenswrapper[4869]: I1001 15:22:30.525392 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vzlsc\" (UniqueName: \"kubernetes.io/projected/a85f4a7e-3c28-4c54-be1f-723b7bca17cb-kube-api-access-vzlsc\") pod \"ceilometer-0\" (UID: \"a85f4a7e-3c28-4c54-be1f-723b7bca17cb\") " pod="openstack/ceilometer-0" Oct 01 15:22:30 crc kubenswrapper[4869]: I1001 15:22:30.626395 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vzlsc\" (UniqueName: \"kubernetes.io/projected/a85f4a7e-3c28-4c54-be1f-723b7bca17cb-kube-api-access-vzlsc\") pod \"ceilometer-0\" (UID: \"a85f4a7e-3c28-4c54-be1f-723b7bca17cb\") " pod="openstack/ceilometer-0" Oct 01 15:22:30 crc kubenswrapper[4869]: I1001 15:22:30.626475 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/a85f4a7e-3c28-4c54-be1f-723b7bca17cb-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"a85f4a7e-3c28-4c54-be1f-723b7bca17cb\") " pod="openstack/ceilometer-0" Oct 01 15:22:30 crc kubenswrapper[4869]: I1001 15:22:30.626559 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a85f4a7e-3c28-4c54-be1f-723b7bca17cb-config-data\") pod \"ceilometer-0\" (UID: \"a85f4a7e-3c28-4c54-be1f-723b7bca17cb\") " pod="openstack/ceilometer-0" Oct 01 15:22:30 crc kubenswrapper[4869]: I1001 
15:22:30.626596 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a85f4a7e-3c28-4c54-be1f-723b7bca17cb-log-httpd\") pod \"ceilometer-0\" (UID: \"a85f4a7e-3c28-4c54-be1f-723b7bca17cb\") " pod="openstack/ceilometer-0" Oct 01 15:22:30 crc kubenswrapper[4869]: I1001 15:22:30.626616 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a85f4a7e-3c28-4c54-be1f-723b7bca17cb-scripts\") pod \"ceilometer-0\" (UID: \"a85f4a7e-3c28-4c54-be1f-723b7bca17cb\") " pod="openstack/ceilometer-0" Oct 01 15:22:30 crc kubenswrapper[4869]: I1001 15:22:30.626640 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a85f4a7e-3c28-4c54-be1f-723b7bca17cb-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a85f4a7e-3c28-4c54-be1f-723b7bca17cb\") " pod="openstack/ceilometer-0" Oct 01 15:22:30 crc kubenswrapper[4869]: I1001 15:22:30.626658 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a85f4a7e-3c28-4c54-be1f-723b7bca17cb-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a85f4a7e-3c28-4c54-be1f-723b7bca17cb\") " pod="openstack/ceilometer-0" Oct 01 15:22:30 crc kubenswrapper[4869]: I1001 15:22:30.626698 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a85f4a7e-3c28-4c54-be1f-723b7bca17cb-run-httpd\") pod \"ceilometer-0\" (UID: \"a85f4a7e-3c28-4c54-be1f-723b7bca17cb\") " pod="openstack/ceilometer-0" Oct 01 15:22:30 crc kubenswrapper[4869]: I1001 15:22:30.627133 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a85f4a7e-3c28-4c54-be1f-723b7bca17cb-run-httpd\") pod \"ceilometer-0\" (UID: \"a85f4a7e-3c28-4c54-be1f-723b7bca17cb\") " pod="openstack/ceilometer-0" Oct 01 15:22:30 crc kubenswrapper[4869]: I1001 15:22:30.627700 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a85f4a7e-3c28-4c54-be1f-723b7bca17cb-log-httpd\") pod \"ceilometer-0\" (UID: \"a85f4a7e-3c28-4c54-be1f-723b7bca17cb\") " pod="openstack/ceilometer-0" Oct 01 15:22:30 crc kubenswrapper[4869]: I1001 15:22:30.630697 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/a85f4a7e-3c28-4c54-be1f-723b7bca17cb-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"a85f4a7e-3c28-4c54-be1f-723b7bca17cb\") " pod="openstack/ceilometer-0" Oct 01 15:22:30 crc kubenswrapper[4869]: I1001 15:22:30.631469 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a85f4a7e-3c28-4c54-be1f-723b7bca17cb-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a85f4a7e-3c28-4c54-be1f-723b7bca17cb\") " pod="openstack/ceilometer-0" Oct 01 15:22:30 crc kubenswrapper[4869]: I1001 15:22:30.631660 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a85f4a7e-3c28-4c54-be1f-723b7bca17cb-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a85f4a7e-3c28-4c54-be1f-723b7bca17cb\") " pod="openstack/ceilometer-0" Oct 01 15:22:30 crc kubenswrapper[4869]: I1001 15:22:30.631798 4869 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a85f4a7e-3c28-4c54-be1f-723b7bca17cb-config-data\") pod \"ceilometer-0\" (UID: \"a85f4a7e-3c28-4c54-be1f-723b7bca17cb\") " pod="openstack/ceilometer-0" Oct 01 15:22:30 crc kubenswrapper[4869]: I1001 15:22:30.632748 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a85f4a7e-3c28-4c54-be1f-723b7bca17cb-scripts\") pod \"ceilometer-0\" (UID: \"a85f4a7e-3c28-4c54-be1f-723b7bca17cb\") " pod="openstack/ceilometer-0" Oct 01 15:22:30 crc kubenswrapper[4869]: I1001 15:22:30.647011 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vzlsc\" (UniqueName: \"kubernetes.io/projected/a85f4a7e-3c28-4c54-be1f-723b7bca17cb-kube-api-access-vzlsc\") pod \"ceilometer-0\" (UID: \"a85f4a7e-3c28-4c54-be1f-723b7bca17cb\") " pod="openstack/ceilometer-0" Oct 01 15:22:30 crc kubenswrapper[4869]: I1001 15:22:30.798289 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 01 15:22:31 crc kubenswrapper[4869]: I1001 15:22:31.088630 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 01 15:22:31 crc kubenswrapper[4869]: I1001 15:22:31.241910 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/811451db-4e74-4bfd-9916-d0036698f3f2-logs\") pod \"811451db-4e74-4bfd-9916-d0036698f3f2\" (UID: \"811451db-4e74-4bfd-9916-d0036698f3f2\") " Oct 01 15:22:31 crc kubenswrapper[4869]: I1001 15:22:31.242059 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/811451db-4e74-4bfd-9916-d0036698f3f2-config-data\") pod \"811451db-4e74-4bfd-9916-d0036698f3f2\" (UID: \"811451db-4e74-4bfd-9916-d0036698f3f2\") " Oct 01 15:22:31 crc kubenswrapper[4869]: I1001 15:22:31.242185 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/811451db-4e74-4bfd-9916-d0036698f3f2-combined-ca-bundle\") pod \"811451db-4e74-4bfd-9916-d0036698f3f2\" (UID: \"811451db-4e74-4bfd-9916-d0036698f3f2\") " Oct 01 15:22:31 crc kubenswrapper[4869]: I1001 15:22:31.242615 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/811451db-4e74-4bfd-9916-d0036698f3f2-logs" (OuterVolumeSpecName: "logs") pod "811451db-4e74-4bfd-9916-d0036698f3f2" (UID: "811451db-4e74-4bfd-9916-d0036698f3f2"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 15:22:31 crc kubenswrapper[4869]: I1001 15:22:31.242834 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hdpqb\" (UniqueName: \"kubernetes.io/projected/811451db-4e74-4bfd-9916-d0036698f3f2-kube-api-access-hdpqb\") pod \"811451db-4e74-4bfd-9916-d0036698f3f2\" (UID: \"811451db-4e74-4bfd-9916-d0036698f3f2\") " Oct 01 15:22:31 crc kubenswrapper[4869]: I1001 15:22:31.243222 4869 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/811451db-4e74-4bfd-9916-d0036698f3f2-logs\") on node \"crc\" DevicePath \"\"" Oct 01 15:22:31 crc kubenswrapper[4869]: I1001 15:22:31.249484 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/811451db-4e74-4bfd-9916-d0036698f3f2-kube-api-access-hdpqb" (OuterVolumeSpecName: "kube-api-access-hdpqb") pod "811451db-4e74-4bfd-9916-d0036698f3f2" (UID: "811451db-4e74-4bfd-9916-d0036698f3f2"). InnerVolumeSpecName "kube-api-access-hdpqb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:22:31 crc kubenswrapper[4869]: I1001 15:22:31.272331 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/811451db-4e74-4bfd-9916-d0036698f3f2-config-data" (OuterVolumeSpecName: "config-data") pod "811451db-4e74-4bfd-9916-d0036698f3f2" (UID: "811451db-4e74-4bfd-9916-d0036698f3f2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:22:31 crc kubenswrapper[4869]: I1001 15:22:31.279785 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/811451db-4e74-4bfd-9916-d0036698f3f2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "811451db-4e74-4bfd-9916-d0036698f3f2" (UID: "811451db-4e74-4bfd-9916-d0036698f3f2"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:22:31 crc kubenswrapper[4869]: W1001 15:22:31.307406 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda85f4a7e_3c28_4c54_be1f_723b7bca17cb.slice/crio-f804d56478987f7113e177c4d908481bc49e8ef7b05f0e3daf28c8ae6cf91665 WatchSource:0}: Error finding container f804d56478987f7113e177c4d908481bc49e8ef7b05f0e3daf28c8ae6cf91665: Status 404 returned error can't find the container with id f804d56478987f7113e177c4d908481bc49e8ef7b05f0e3daf28c8ae6cf91665 Oct 01 15:22:31 crc kubenswrapper[4869]: I1001 15:22:31.307645 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 01 15:22:31 crc kubenswrapper[4869]: I1001 15:22:31.345045 4869 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/811451db-4e74-4bfd-9916-d0036698f3f2-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 15:22:31 crc kubenswrapper[4869]: I1001 15:22:31.345079 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/811451db-4e74-4bfd-9916-d0036698f3f2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 15:22:31 crc kubenswrapper[4869]: I1001 15:22:31.345091 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hdpqb\" (UniqueName: \"kubernetes.io/projected/811451db-4e74-4bfd-9916-d0036698f3f2-kube-api-access-hdpqb\") on node \"crc\" DevicePath \"\"" Oct 01 15:22:31 crc kubenswrapper[4869]: I1001 15:22:31.412957 4869 generic.go:334] "Generic (PLEG): container finished" podID="811451db-4e74-4bfd-9916-d0036698f3f2" containerID="7a16dd3de0b603e0345f570343ba44c58e96ebf2d9eb8e02fb3cba044bec2028" exitCode=0 Oct 01 15:22:31 crc kubenswrapper[4869]: I1001 15:22:31.413061 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 01 15:22:31 crc kubenswrapper[4869]: I1001 15:22:31.413277 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"811451db-4e74-4bfd-9916-d0036698f3f2","Type":"ContainerDied","Data":"7a16dd3de0b603e0345f570343ba44c58e96ebf2d9eb8e02fb3cba044bec2028"} Oct 01 15:22:31 crc kubenswrapper[4869]: I1001 15:22:31.413326 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"811451db-4e74-4bfd-9916-d0036698f3f2","Type":"ContainerDied","Data":"6ce98187e4ddd2379fbbad88e1022b5061a9822b114e843a54d9029a91ef6d5a"} Oct 01 15:22:31 crc kubenswrapper[4869]: I1001 15:22:31.413345 4869 scope.go:117] "RemoveContainer" containerID="7a16dd3de0b603e0345f570343ba44c58e96ebf2d9eb8e02fb3cba044bec2028" Oct 01 15:22:31 crc kubenswrapper[4869]: I1001 15:22:31.416987 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a85f4a7e-3c28-4c54-be1f-723b7bca17cb","Type":"ContainerStarted","Data":"f804d56478987f7113e177c4d908481bc49e8ef7b05f0e3daf28c8ae6cf91665"} Oct 01 15:22:31 crc kubenswrapper[4869]: I1001 15:22:31.446998 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 01 15:22:31 crc kubenswrapper[4869]: I1001 15:22:31.456818 4869 scope.go:117] "RemoveContainer" containerID="12e54c9d8996869e9ae82636fe4218199f1f6691a870e6af921e866f18da713b" Oct 01 15:22:31 crc kubenswrapper[4869]: I1001 15:22:31.457505 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Oct 01 15:22:31 crc kubenswrapper[4869]: I1001 15:22:31.465818 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Oct 01 15:22:31 crc kubenswrapper[4869]: E1001 15:22:31.466169 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="811451db-4e74-4bfd-9916-d0036698f3f2" containerName="nova-api-api" Oct 01 15:22:31 crc kubenswrapper[4869]: I1001 15:22:31.466185 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="811451db-4e74-4bfd-9916-d0036698f3f2" containerName="nova-api-api" Oct 01 15:22:31 crc kubenswrapper[4869]: E1001 15:22:31.466200 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="811451db-4e74-4bfd-9916-d0036698f3f2" containerName="nova-api-log" Oct 01 15:22:31 crc kubenswrapper[4869]: I1001 15:22:31.466206 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="811451db-4e74-4bfd-9916-d0036698f3f2" containerName="nova-api-log" Oct 01 15:22:31 crc kubenswrapper[4869]: I1001 15:22:31.466384 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="811451db-4e74-4bfd-9916-d0036698f3f2" containerName="nova-api-log" Oct 01 15:22:31 crc kubenswrapper[4869]: I1001 15:22:31.466414 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="811451db-4e74-4bfd-9916-d0036698f3f2" containerName="nova-api-api" Oct 01 15:22:31 crc kubenswrapper[4869]: I1001 15:22:31.470474 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 01 15:22:31 crc kubenswrapper[4869]: I1001 15:22:31.472980 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Oct 01 15:22:31 crc kubenswrapper[4869]: I1001 15:22:31.473588 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Oct 01 15:22:31 crc kubenswrapper[4869]: I1001 15:22:31.473828 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Oct 01 15:22:31 crc kubenswrapper[4869]: I1001 15:22:31.477198 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 01 15:22:31 crc kubenswrapper[4869]: I1001 15:22:31.486378 4869 scope.go:117] "RemoveContainer" containerID="7a16dd3de0b603e0345f570343ba44c58e96ebf2d9eb8e02fb3cba044bec2028" Oct 01 15:22:31 crc kubenswrapper[4869]: E1001 15:22:31.486779 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7a16dd3de0b603e0345f570343ba44c58e96ebf2d9eb8e02fb3cba044bec2028\": container with ID starting with 7a16dd3de0b603e0345f570343ba44c58e96ebf2d9eb8e02fb3cba044bec2028 not found: ID does not exist" containerID="7a16dd3de0b603e0345f570343ba44c58e96ebf2d9eb8e02fb3cba044bec2028" Oct 01 15:22:31 crc kubenswrapper[4869]: I1001 15:22:31.486813 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7a16dd3de0b603e0345f570343ba44c58e96ebf2d9eb8e02fb3cba044bec2028"} err="failed to get container status \"7a16dd3de0b603e0345f570343ba44c58e96ebf2d9eb8e02fb3cba044bec2028\": rpc error: code = NotFound desc = could not find container \"7a16dd3de0b603e0345f570343ba44c58e96ebf2d9eb8e02fb3cba044bec2028\": container with ID starting with 7a16dd3de0b603e0345f570343ba44c58e96ebf2d9eb8e02fb3cba044bec2028 not found: ID does not exist" Oct 01 15:22:31 crc kubenswrapper[4869]: I1001 15:22:31.486834 4869 scope.go:117] "RemoveContainer" containerID="12e54c9d8996869e9ae82636fe4218199f1f6691a870e6af921e866f18da713b" Oct 01 15:22:31 crc kubenswrapper[4869]: E1001 15:22:31.487069 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"12e54c9d8996869e9ae82636fe4218199f1f6691a870e6af921e866f18da713b\": container with ID starting with 12e54c9d8996869e9ae82636fe4218199f1f6691a870e6af921e866f18da713b not found: ID does not exist" containerID="12e54c9d8996869e9ae82636fe4218199f1f6691a870e6af921e866f18da713b" Oct 01 15:22:31 crc kubenswrapper[4869]: I1001 15:22:31.487100 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"12e54c9d8996869e9ae82636fe4218199f1f6691a870e6af921e866f18da713b"} err="failed to get container status \"12e54c9d8996869e9ae82636fe4218199f1f6691a870e6af921e866f18da713b\": rpc error: code = NotFound desc = could not find container \"12e54c9d8996869e9ae82636fe4218199f1f6691a870e6af921e866f18da713b\": container with ID starting with 12e54c9d8996869e9ae82636fe4218199f1f6691a870e6af921e866f18da713b not found: ID does not exist" Oct 01 15:22:31 crc kubenswrapper[4869]: I1001 15:22:31.593459 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="811451db-4e74-4bfd-9916-d0036698f3f2" path="/var/lib/kubelet/pods/811451db-4e74-4bfd-9916-d0036698f3f2/volumes" Oct 01 15:22:31 crc kubenswrapper[4869]: I1001 15:22:31.594308 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="cd73cdc8-d865-4b29-842c-5f11fe20cf66" path="/var/lib/kubelet/pods/cd73cdc8-d865-4b29-842c-5f11fe20cf66/volumes" Oct 01 15:22:31 crc kubenswrapper[4869]: I1001 15:22:31.649012 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e191eafa-7a88-4bf2-baef-434b83941630-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"e191eafa-7a88-4bf2-baef-434b83941630\") " pod="openstack/nova-api-0" Oct 01 15:22:31 crc kubenswrapper[4869]: I1001 15:22:31.649117 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e191eafa-7a88-4bf2-baef-434b83941630-logs\") pod \"nova-api-0\" (UID: \"e191eafa-7a88-4bf2-baef-434b83941630\") " pod="openstack/nova-api-0" Oct 01 15:22:31 crc kubenswrapper[4869]: I1001 15:22:31.649193 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e191eafa-7a88-4bf2-baef-434b83941630-config-data\") pod \"nova-api-0\" (UID: \"e191eafa-7a88-4bf2-baef-434b83941630\") " pod="openstack/nova-api-0" Oct 01 15:22:31 crc kubenswrapper[4869]: I1001 15:22:31.649294 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e191eafa-7a88-4bf2-baef-434b83941630-public-tls-certs\") pod \"nova-api-0\" (UID: \"e191eafa-7a88-4bf2-baef-434b83941630\") " pod="openstack/nova-api-0" Oct 01 15:22:31 crc kubenswrapper[4869]: I1001 15:22:31.649361 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e191eafa-7a88-4bf2-baef-434b83941630-internal-tls-certs\") pod \"nova-api-0\" (UID: \"e191eafa-7a88-4bf2-baef-434b83941630\") " pod="openstack/nova-api-0" Oct 01 15:22:31 crc kubenswrapper[4869]: I1001 15:22:31.649429 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qs97n\" (UniqueName: \"kubernetes.io/projected/e191eafa-7a88-4bf2-baef-434b83941630-kube-api-access-qs97n\") pod \"nova-api-0\" (UID: \"e191eafa-7a88-4bf2-baef-434b83941630\") " pod="openstack/nova-api-0" Oct 01 15:22:31 crc kubenswrapper[4869]: I1001 15:22:31.750717 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e191eafa-7a88-4bf2-baef-434b83941630-public-tls-certs\") pod \"nova-api-0\" (UID: \"e191eafa-7a88-4bf2-baef-434b83941630\") " pod="openstack/nova-api-0" Oct 01 15:22:31 crc kubenswrapper[4869]: I1001 15:22:31.750798 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e191eafa-7a88-4bf2-baef-434b83941630-internal-tls-certs\") pod \"nova-api-0\" (UID: \"e191eafa-7a88-4bf2-baef-434b83941630\") " pod="openstack/nova-api-0" Oct 01 15:22:31 crc kubenswrapper[4869]: I1001 15:22:31.750865 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qs97n\" (UniqueName: \"kubernetes.io/projected/e191eafa-7a88-4bf2-baef-434b83941630-kube-api-access-qs97n\") pod \"nova-api-0\" (UID: \"e191eafa-7a88-4bf2-baef-434b83941630\") " pod="openstack/nova-api-0" Oct 01 15:22:31 crc kubenswrapper[4869]: I1001 15:22:31.750907 4869 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e191eafa-7a88-4bf2-baef-434b83941630-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"e191eafa-7a88-4bf2-baef-434b83941630\") " pod="openstack/nova-api-0" Oct 01 15:22:31 crc kubenswrapper[4869]: I1001 15:22:31.750935 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e191eafa-7a88-4bf2-baef-434b83941630-logs\") pod \"nova-api-0\" (UID: \"e191eafa-7a88-4bf2-baef-434b83941630\") " pod="openstack/nova-api-0" Oct 01 15:22:31 crc kubenswrapper[4869]: I1001 15:22:31.751000 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e191eafa-7a88-4bf2-baef-434b83941630-config-data\") pod \"nova-api-0\" (UID: \"e191eafa-7a88-4bf2-baef-434b83941630\") " pod="openstack/nova-api-0" Oct 01 15:22:31 crc kubenswrapper[4869]: I1001 15:22:31.751522 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e191eafa-7a88-4bf2-baef-434b83941630-logs\") pod \"nova-api-0\" (UID: \"e191eafa-7a88-4bf2-baef-434b83941630\") " pod="openstack/nova-api-0" Oct 01 15:22:31 crc kubenswrapper[4869]: I1001 15:22:31.754589 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e191eafa-7a88-4bf2-baef-434b83941630-config-data\") pod \"nova-api-0\" (UID: \"e191eafa-7a88-4bf2-baef-434b83941630\") " pod="openstack/nova-api-0" Oct 01 15:22:31 crc kubenswrapper[4869]: I1001 15:22:31.755049 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e191eafa-7a88-4bf2-baef-434b83941630-public-tls-certs\") pod \"nova-api-0\" (UID: \"e191eafa-7a88-4bf2-baef-434b83941630\") " pod="openstack/nova-api-0" Oct 01 15:22:31 crc kubenswrapper[4869]: I1001 15:22:31.761677 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e191eafa-7a88-4bf2-baef-434b83941630-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"e191eafa-7a88-4bf2-baef-434b83941630\") " pod="openstack/nova-api-0" Oct 01 15:22:31 crc kubenswrapper[4869]: I1001 15:22:31.761705 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e191eafa-7a88-4bf2-baef-434b83941630-internal-tls-certs\") pod \"nova-api-0\" (UID: \"e191eafa-7a88-4bf2-baef-434b83941630\") " pod="openstack/nova-api-0" Oct 01 15:22:31 crc kubenswrapper[4869]: I1001 15:22:31.771050 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qs97n\" (UniqueName: \"kubernetes.io/projected/e191eafa-7a88-4bf2-baef-434b83941630-kube-api-access-qs97n\") pod \"nova-api-0\" (UID: \"e191eafa-7a88-4bf2-baef-434b83941630\") " pod="openstack/nova-api-0" Oct 01 15:22:31 crc kubenswrapper[4869]: I1001 15:22:31.792177 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 01 15:22:32 crc kubenswrapper[4869]: I1001 15:22:32.332106 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 01 15:22:32 crc kubenswrapper[4869]: W1001 15:22:32.339399 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode191eafa_7a88_4bf2_baef_434b83941630.slice/crio-fc038caf65210914cc80790844d0a8d8f7ad1404c6c261967939d96b226ff8fd WatchSource:0}: Error finding container fc038caf65210914cc80790844d0a8d8f7ad1404c6c261967939d96b226ff8fd: Status 404 returned error can't find the container with id fc038caf65210914cc80790844d0a8d8f7ad1404c6c261967939d96b226ff8fd Oct 01 15:22:32 crc kubenswrapper[4869]: I1001 15:22:32.429989 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"e191eafa-7a88-4bf2-baef-434b83941630","Type":"ContainerStarted","Data":"fc038caf65210914cc80790844d0a8d8f7ad1404c6c261967939d96b226ff8fd"} Oct 01 15:22:32 crc kubenswrapper[4869]: I1001 15:22:32.433246 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a85f4a7e-3c28-4c54-be1f-723b7bca17cb","Type":"ContainerStarted","Data":"5d7f6bb569c2d509ac156b44fe77860c43340f6cebae00646167e172b3843863"} Oct 01 15:22:33 crc kubenswrapper[4869]: I1001 15:22:33.037095 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Oct 01 15:22:33 crc kubenswrapper[4869]: I1001 15:22:33.056078 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Oct 01 15:22:33 crc kubenswrapper[4869]: I1001 15:22:33.451080 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"e191eafa-7a88-4bf2-baef-434b83941630","Type":"ContainerStarted","Data":"229a38763592d665ca81e0e486b8dbb49692b888965e126f831d4c9037f071c8"} Oct 01 15:22:33 crc kubenswrapper[4869]: I1001 15:22:33.451411 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"e191eafa-7a88-4bf2-baef-434b83941630","Type":"ContainerStarted","Data":"f5a29ec0736db0777190c3e98539cd8e8611a9cf68a8070d7d7d3c794c974a64"} Oct 01 15:22:33 crc kubenswrapper[4869]: I1001 15:22:33.458056 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a85f4a7e-3c28-4c54-be1f-723b7bca17cb","Type":"ContainerStarted","Data":"7e608337fd0018c6ebc75695e00d468c4bd848f5712c6466fcb75bbe530c7950"} Oct 01 15:22:33 crc kubenswrapper[4869]: I1001 15:22:33.479836 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.479811984 podStartE2EDuration="2.479811984s" podCreationTimestamp="2025-10-01 15:22:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:22:33.471346711 +0000 UTC m=+1062.618189837" watchObservedRunningTime="2025-10-01 15:22:33.479811984 +0000 UTC m=+1062.626655120" Oct 01 15:22:33 crc kubenswrapper[4869]: I1001 15:22:33.494504 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Oct 01 15:22:33 crc kubenswrapper[4869]: I1001 15:22:33.643802 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-zhzd5"] Oct 01 15:22:33 crc kubenswrapper[4869]: I1001 15:22:33.660813 4869 
kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-zhzd5"] Oct 01 15:22:33 crc kubenswrapper[4869]: I1001 15:22:33.660910 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-zhzd5" Oct 01 15:22:33 crc kubenswrapper[4869]: I1001 15:22:33.663409 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data" Oct 01 15:22:33 crc kubenswrapper[4869]: I1001 15:22:33.665865 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts" Oct 01 15:22:33 crc kubenswrapper[4869]: I1001 15:22:33.789110 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/03242a06-2186-4f5d-9f1b-9b11db33e397-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-zhzd5\" (UID: \"03242a06-2186-4f5d-9f1b-9b11db33e397\") " pod="openstack/nova-cell1-cell-mapping-zhzd5" Oct 01 15:22:33 crc kubenswrapper[4869]: I1001 15:22:33.789416 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/03242a06-2186-4f5d-9f1b-9b11db33e397-config-data\") pod \"nova-cell1-cell-mapping-zhzd5\" (UID: \"03242a06-2186-4f5d-9f1b-9b11db33e397\") " pod="openstack/nova-cell1-cell-mapping-zhzd5" Oct 01 15:22:33 crc kubenswrapper[4869]: I1001 15:22:33.789442 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/03242a06-2186-4f5d-9f1b-9b11db33e397-scripts\") pod \"nova-cell1-cell-mapping-zhzd5\" (UID: \"03242a06-2186-4f5d-9f1b-9b11db33e397\") " pod="openstack/nova-cell1-cell-mapping-zhzd5" Oct 01 15:22:33 crc kubenswrapper[4869]: I1001 15:22:33.789471 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4h6v7\" (UniqueName: \"kubernetes.io/projected/03242a06-2186-4f5d-9f1b-9b11db33e397-kube-api-access-4h6v7\") pod \"nova-cell1-cell-mapping-zhzd5\" (UID: \"03242a06-2186-4f5d-9f1b-9b11db33e397\") " pod="openstack/nova-cell1-cell-mapping-zhzd5" Oct 01 15:22:33 crc kubenswrapper[4869]: I1001 15:22:33.891023 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/03242a06-2186-4f5d-9f1b-9b11db33e397-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-zhzd5\" (UID: \"03242a06-2186-4f5d-9f1b-9b11db33e397\") " pod="openstack/nova-cell1-cell-mapping-zhzd5" Oct 01 15:22:33 crc kubenswrapper[4869]: I1001 15:22:33.891338 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/03242a06-2186-4f5d-9f1b-9b11db33e397-config-data\") pod \"nova-cell1-cell-mapping-zhzd5\" (UID: \"03242a06-2186-4f5d-9f1b-9b11db33e397\") " pod="openstack/nova-cell1-cell-mapping-zhzd5" Oct 01 15:22:33 crc kubenswrapper[4869]: I1001 15:22:33.891438 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/03242a06-2186-4f5d-9f1b-9b11db33e397-scripts\") pod \"nova-cell1-cell-mapping-zhzd5\" (UID: \"03242a06-2186-4f5d-9f1b-9b11db33e397\") " pod="openstack/nova-cell1-cell-mapping-zhzd5" Oct 01 15:22:33 crc kubenswrapper[4869]: I1001 15:22:33.891531 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-4h6v7\" (UniqueName: \"kubernetes.io/projected/03242a06-2186-4f5d-9f1b-9b11db33e397-kube-api-access-4h6v7\") pod \"nova-cell1-cell-mapping-zhzd5\" (UID: \"03242a06-2186-4f5d-9f1b-9b11db33e397\") " pod="openstack/nova-cell1-cell-mapping-zhzd5" Oct 01 15:22:33 crc kubenswrapper[4869]: I1001 15:22:33.895433 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/03242a06-2186-4f5d-9f1b-9b11db33e397-scripts\") pod \"nova-cell1-cell-mapping-zhzd5\" (UID: \"03242a06-2186-4f5d-9f1b-9b11db33e397\") " pod="openstack/nova-cell1-cell-mapping-zhzd5" Oct 01 15:22:33 crc kubenswrapper[4869]: I1001 15:22:33.895891 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/03242a06-2186-4f5d-9f1b-9b11db33e397-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-zhzd5\" (UID: \"03242a06-2186-4f5d-9f1b-9b11db33e397\") " pod="openstack/nova-cell1-cell-mapping-zhzd5" Oct 01 15:22:33 crc kubenswrapper[4869]: I1001 15:22:33.902606 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/03242a06-2186-4f5d-9f1b-9b11db33e397-config-data\") pod \"nova-cell1-cell-mapping-zhzd5\" (UID: \"03242a06-2186-4f5d-9f1b-9b11db33e397\") " pod="openstack/nova-cell1-cell-mapping-zhzd5" Oct 01 15:22:33 crc kubenswrapper[4869]: I1001 15:22:33.910073 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4h6v7\" (UniqueName: \"kubernetes.io/projected/03242a06-2186-4f5d-9f1b-9b11db33e397-kube-api-access-4h6v7\") pod \"nova-cell1-cell-mapping-zhzd5\" (UID: \"03242a06-2186-4f5d-9f1b-9b11db33e397\") " pod="openstack/nova-cell1-cell-mapping-zhzd5" Oct 01 15:22:33 crc kubenswrapper[4869]: I1001 15:22:33.977734 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-zhzd5" Oct 01 15:22:34 crc kubenswrapper[4869]: I1001 15:22:34.458532 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-zhzd5"] Oct 01 15:22:34 crc kubenswrapper[4869]: I1001 15:22:34.476355 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a85f4a7e-3c28-4c54-be1f-723b7bca17cb","Type":"ContainerStarted","Data":"27c896f79ef9992f021af92cb85e0b552a056402feee8e6d6d999b744c9f0674"} Oct 01 15:22:35 crc kubenswrapper[4869]: I1001 15:22:35.383418 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6fcdb98747-4stnn" Oct 01 15:22:35 crc kubenswrapper[4869]: I1001 15:22:35.445497 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7c57d6658c-9jnss"] Oct 01 15:22:35 crc kubenswrapper[4869]: I1001 15:22:35.445796 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7c57d6658c-9jnss" podUID="54903b9a-2e49-43f1-9989-ff8d13276fe7" containerName="dnsmasq-dns" containerID="cri-o://7768ef1facd2bcbde779e78384d383b9b7ae89a72a4b666417b8d9b57ac19d7b" gracePeriod=10 Oct 01 15:22:35 crc kubenswrapper[4869]: I1001 15:22:35.488938 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-zhzd5" event={"ID":"03242a06-2186-4f5d-9f1b-9b11db33e397","Type":"ContainerStarted","Data":"63bdd2cb5281851b9793c3d9cdf1618ecff3b8f2d442a1b0795322788144dd0a"} Oct 01 15:22:35 crc kubenswrapper[4869]: I1001 15:22:35.488982 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-zhzd5" event={"ID":"03242a06-2186-4f5d-9f1b-9b11db33e397","Type":"ContainerStarted","Data":"3e6c72ffc2891c72f9efb0b53ed88e46c85ecb69ffcaf28a3f849e5f7b9a70b3"} Oct 01 15:22:35 crc kubenswrapper[4869]: I1001 15:22:35.492921 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a85f4a7e-3c28-4c54-be1f-723b7bca17cb","Type":"ContainerStarted","Data":"50af1c3ce7191283bfc6bc3fe08fa17509298223aa1e97b01c5d0f266373f2b5"} Oct 01 15:22:35 crc kubenswrapper[4869]: I1001 15:22:35.493184 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 01 15:22:35 crc kubenswrapper[4869]: I1001 15:22:35.525718 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-zhzd5" podStartSLOduration=2.525693609 podStartE2EDuration="2.525693609s" podCreationTimestamp="2025-10-01 15:22:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:22:35.508317781 +0000 UTC m=+1064.655160897" watchObservedRunningTime="2025-10-01 15:22:35.525693609 +0000 UTC m=+1064.672536725" Oct 01 15:22:35 crc kubenswrapper[4869]: I1001 15:22:35.544683 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.814278458 podStartE2EDuration="5.544664408s" podCreationTimestamp="2025-10-01 15:22:30 +0000 UTC" firstStartedPulling="2025-10-01 15:22:31.310789575 +0000 UTC m=+1060.457632691" lastFinishedPulling="2025-10-01 15:22:35.041175525 +0000 UTC m=+1064.188018641" observedRunningTime="2025-10-01 15:22:35.529769762 +0000 UTC m=+1064.676612898" watchObservedRunningTime="2025-10-01 15:22:35.544664408 +0000 UTC m=+1064.691507524" Oct 01 15:22:36 crc kubenswrapper[4869]: 
I1001 15:22:36.064074 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7c57d6658c-9jnss" Oct 01 15:22:36 crc kubenswrapper[4869]: I1001 15:22:36.250997 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/54903b9a-2e49-43f1-9989-ff8d13276fe7-ovsdbserver-sb\") pod \"54903b9a-2e49-43f1-9989-ff8d13276fe7\" (UID: \"54903b9a-2e49-43f1-9989-ff8d13276fe7\") " Oct 01 15:22:36 crc kubenswrapper[4869]: I1001 15:22:36.251315 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/54903b9a-2e49-43f1-9989-ff8d13276fe7-dns-svc\") pod \"54903b9a-2e49-43f1-9989-ff8d13276fe7\" (UID: \"54903b9a-2e49-43f1-9989-ff8d13276fe7\") " Oct 01 15:22:36 crc kubenswrapper[4869]: I1001 15:22:36.251410 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mdbj4\" (UniqueName: \"kubernetes.io/projected/54903b9a-2e49-43f1-9989-ff8d13276fe7-kube-api-access-mdbj4\") pod \"54903b9a-2e49-43f1-9989-ff8d13276fe7\" (UID: \"54903b9a-2e49-43f1-9989-ff8d13276fe7\") " Oct 01 15:22:36 crc kubenswrapper[4869]: I1001 15:22:36.251505 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/54903b9a-2e49-43f1-9989-ff8d13276fe7-config\") pod \"54903b9a-2e49-43f1-9989-ff8d13276fe7\" (UID: \"54903b9a-2e49-43f1-9989-ff8d13276fe7\") " Oct 01 15:22:36 crc kubenswrapper[4869]: I1001 15:22:36.251646 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/54903b9a-2e49-43f1-9989-ff8d13276fe7-ovsdbserver-nb\") pod \"54903b9a-2e49-43f1-9989-ff8d13276fe7\" (UID: \"54903b9a-2e49-43f1-9989-ff8d13276fe7\") " Oct 01 15:22:36 crc kubenswrapper[4869]: I1001 15:22:36.259741 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/54903b9a-2e49-43f1-9989-ff8d13276fe7-kube-api-access-mdbj4" (OuterVolumeSpecName: "kube-api-access-mdbj4") pod "54903b9a-2e49-43f1-9989-ff8d13276fe7" (UID: "54903b9a-2e49-43f1-9989-ff8d13276fe7"). InnerVolumeSpecName "kube-api-access-mdbj4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:22:36 crc kubenswrapper[4869]: I1001 15:22:36.301928 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/54903b9a-2e49-43f1-9989-ff8d13276fe7-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "54903b9a-2e49-43f1-9989-ff8d13276fe7" (UID: "54903b9a-2e49-43f1-9989-ff8d13276fe7"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:22:36 crc kubenswrapper[4869]: I1001 15:22:36.308022 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/54903b9a-2e49-43f1-9989-ff8d13276fe7-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "54903b9a-2e49-43f1-9989-ff8d13276fe7" (UID: "54903b9a-2e49-43f1-9989-ff8d13276fe7"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:22:36 crc kubenswrapper[4869]: I1001 15:22:36.317020 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/54903b9a-2e49-43f1-9989-ff8d13276fe7-config" (OuterVolumeSpecName: "config") pod "54903b9a-2e49-43f1-9989-ff8d13276fe7" (UID: "54903b9a-2e49-43f1-9989-ff8d13276fe7"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:22:36 crc kubenswrapper[4869]: I1001 15:22:36.332719 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/54903b9a-2e49-43f1-9989-ff8d13276fe7-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "54903b9a-2e49-43f1-9989-ff8d13276fe7" (UID: "54903b9a-2e49-43f1-9989-ff8d13276fe7"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:22:36 crc kubenswrapper[4869]: I1001 15:22:36.353913 4869 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/54903b9a-2e49-43f1-9989-ff8d13276fe7-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 01 15:22:36 crc kubenswrapper[4869]: I1001 15:22:36.353947 4869 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/54903b9a-2e49-43f1-9989-ff8d13276fe7-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 01 15:22:36 crc kubenswrapper[4869]: I1001 15:22:36.353957 4869 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/54903b9a-2e49-43f1-9989-ff8d13276fe7-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 01 15:22:36 crc kubenswrapper[4869]: I1001 15:22:36.353968 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mdbj4\" (UniqueName: \"kubernetes.io/projected/54903b9a-2e49-43f1-9989-ff8d13276fe7-kube-api-access-mdbj4\") on node \"crc\" DevicePath \"\"" Oct 01 15:22:36 crc kubenswrapper[4869]: I1001 15:22:36.353979 4869 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/54903b9a-2e49-43f1-9989-ff8d13276fe7-config\") on node \"crc\" DevicePath \"\"" Oct 01 15:22:36 crc kubenswrapper[4869]: I1001 15:22:36.503192 4869 generic.go:334] "Generic (PLEG): container finished" podID="54903b9a-2e49-43f1-9989-ff8d13276fe7" containerID="7768ef1facd2bcbde779e78384d383b9b7ae89a72a4b666417b8d9b57ac19d7b" exitCode=0 Oct 01 15:22:36 crc kubenswrapper[4869]: I1001 15:22:36.504840 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7c57d6658c-9jnss" Oct 01 15:22:36 crc kubenswrapper[4869]: I1001 15:22:36.508300 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7c57d6658c-9jnss" event={"ID":"54903b9a-2e49-43f1-9989-ff8d13276fe7","Type":"ContainerDied","Data":"7768ef1facd2bcbde779e78384d383b9b7ae89a72a4b666417b8d9b57ac19d7b"} Oct 01 15:22:36 crc kubenswrapper[4869]: I1001 15:22:36.508332 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7c57d6658c-9jnss" event={"ID":"54903b9a-2e49-43f1-9989-ff8d13276fe7","Type":"ContainerDied","Data":"667cae2c7a132c2dd606b2ad7f038ddc4bbbcfc3cb8fc8e3cfebf758dedcb473"} Oct 01 15:22:36 crc kubenswrapper[4869]: I1001 15:22:36.508347 4869 scope.go:117] "RemoveContainer" containerID="7768ef1facd2bcbde779e78384d383b9b7ae89a72a4b666417b8d9b57ac19d7b" Oct 01 15:22:36 crc kubenswrapper[4869]: I1001 15:22:36.542029 4869 scope.go:117] "RemoveContainer" containerID="f812380e5b3951d377839bd2e1d6958419ac33f083a8c9278239abf409f5ee74" Oct 01 15:22:36 crc kubenswrapper[4869]: I1001 15:22:36.544762 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7c57d6658c-9jnss"] Oct 01 15:22:36 crc kubenswrapper[4869]: I1001 15:22:36.552903 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7c57d6658c-9jnss"] Oct 01 15:22:36 crc kubenswrapper[4869]: I1001 15:22:36.564126 4869 scope.go:117] "RemoveContainer" containerID="7768ef1facd2bcbde779e78384d383b9b7ae89a72a4b666417b8d9b57ac19d7b" Oct 01 15:22:36 crc kubenswrapper[4869]: E1001 15:22:36.564603 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7768ef1facd2bcbde779e78384d383b9b7ae89a72a4b666417b8d9b57ac19d7b\": container with ID starting with 7768ef1facd2bcbde779e78384d383b9b7ae89a72a4b666417b8d9b57ac19d7b not found: ID does not exist" containerID="7768ef1facd2bcbde779e78384d383b9b7ae89a72a4b666417b8d9b57ac19d7b" Oct 01 15:22:36 crc kubenswrapper[4869]: I1001 15:22:36.564631 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7768ef1facd2bcbde779e78384d383b9b7ae89a72a4b666417b8d9b57ac19d7b"} err="failed to get container status \"7768ef1facd2bcbde779e78384d383b9b7ae89a72a4b666417b8d9b57ac19d7b\": rpc error: code = NotFound desc = could not find container \"7768ef1facd2bcbde779e78384d383b9b7ae89a72a4b666417b8d9b57ac19d7b\": container with ID starting with 7768ef1facd2bcbde779e78384d383b9b7ae89a72a4b666417b8d9b57ac19d7b not found: ID does not exist" Oct 01 15:22:36 crc kubenswrapper[4869]: I1001 15:22:36.564652 4869 scope.go:117] "RemoveContainer" containerID="f812380e5b3951d377839bd2e1d6958419ac33f083a8c9278239abf409f5ee74" Oct 01 15:22:36 crc kubenswrapper[4869]: E1001 15:22:36.565002 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f812380e5b3951d377839bd2e1d6958419ac33f083a8c9278239abf409f5ee74\": container with ID starting with f812380e5b3951d377839bd2e1d6958419ac33f083a8c9278239abf409f5ee74 not found: ID does not exist" containerID="f812380e5b3951d377839bd2e1d6958419ac33f083a8c9278239abf409f5ee74" Oct 01 15:22:36 crc kubenswrapper[4869]: I1001 15:22:36.565024 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f812380e5b3951d377839bd2e1d6958419ac33f083a8c9278239abf409f5ee74"} err="failed to get container status 
\"f812380e5b3951d377839bd2e1d6958419ac33f083a8c9278239abf409f5ee74\": rpc error: code = NotFound desc = could not find container \"f812380e5b3951d377839bd2e1d6958419ac33f083a8c9278239abf409f5ee74\": container with ID starting with f812380e5b3951d377839bd2e1d6958419ac33f083a8c9278239abf409f5ee74 not found: ID does not exist" Oct 01 15:22:37 crc kubenswrapper[4869]: I1001 15:22:37.596849 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="54903b9a-2e49-43f1-9989-ff8d13276fe7" path="/var/lib/kubelet/pods/54903b9a-2e49-43f1-9989-ff8d13276fe7/volumes" Oct 01 15:22:39 crc kubenswrapper[4869]: I1001 15:22:39.548040 4869 generic.go:334] "Generic (PLEG): container finished" podID="03242a06-2186-4f5d-9f1b-9b11db33e397" containerID="63bdd2cb5281851b9793c3d9cdf1618ecff3b8f2d442a1b0795322788144dd0a" exitCode=0 Oct 01 15:22:39 crc kubenswrapper[4869]: I1001 15:22:39.548337 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-zhzd5" event={"ID":"03242a06-2186-4f5d-9f1b-9b11db33e397","Type":"ContainerDied","Data":"63bdd2cb5281851b9793c3d9cdf1618ecff3b8f2d442a1b0795322788144dd0a"} Oct 01 15:22:40 crc kubenswrapper[4869]: I1001 15:22:40.957015 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-zhzd5" Oct 01 15:22:41 crc kubenswrapper[4869]: I1001 15:22:41.046903 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/03242a06-2186-4f5d-9f1b-9b11db33e397-scripts\") pod \"03242a06-2186-4f5d-9f1b-9b11db33e397\" (UID: \"03242a06-2186-4f5d-9f1b-9b11db33e397\") " Oct 01 15:22:41 crc kubenswrapper[4869]: I1001 15:22:41.046980 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/03242a06-2186-4f5d-9f1b-9b11db33e397-combined-ca-bundle\") pod \"03242a06-2186-4f5d-9f1b-9b11db33e397\" (UID: \"03242a06-2186-4f5d-9f1b-9b11db33e397\") " Oct 01 15:22:41 crc kubenswrapper[4869]: I1001 15:22:41.047152 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4h6v7\" (UniqueName: \"kubernetes.io/projected/03242a06-2186-4f5d-9f1b-9b11db33e397-kube-api-access-4h6v7\") pod \"03242a06-2186-4f5d-9f1b-9b11db33e397\" (UID: \"03242a06-2186-4f5d-9f1b-9b11db33e397\") " Oct 01 15:22:41 crc kubenswrapper[4869]: I1001 15:22:41.047235 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/03242a06-2186-4f5d-9f1b-9b11db33e397-config-data\") pod \"03242a06-2186-4f5d-9f1b-9b11db33e397\" (UID: \"03242a06-2186-4f5d-9f1b-9b11db33e397\") " Oct 01 15:22:41 crc kubenswrapper[4869]: I1001 15:22:41.052299 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/03242a06-2186-4f5d-9f1b-9b11db33e397-scripts" (OuterVolumeSpecName: "scripts") pod "03242a06-2186-4f5d-9f1b-9b11db33e397" (UID: "03242a06-2186-4f5d-9f1b-9b11db33e397"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:22:41 crc kubenswrapper[4869]: I1001 15:22:41.052315 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/03242a06-2186-4f5d-9f1b-9b11db33e397-kube-api-access-4h6v7" (OuterVolumeSpecName: "kube-api-access-4h6v7") pod "03242a06-2186-4f5d-9f1b-9b11db33e397" (UID: "03242a06-2186-4f5d-9f1b-9b11db33e397"). 
InnerVolumeSpecName "kube-api-access-4h6v7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:22:41 crc kubenswrapper[4869]: I1001 15:22:41.071952 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/03242a06-2186-4f5d-9f1b-9b11db33e397-config-data" (OuterVolumeSpecName: "config-data") pod "03242a06-2186-4f5d-9f1b-9b11db33e397" (UID: "03242a06-2186-4f5d-9f1b-9b11db33e397"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:22:41 crc kubenswrapper[4869]: I1001 15:22:41.084824 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/03242a06-2186-4f5d-9f1b-9b11db33e397-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "03242a06-2186-4f5d-9f1b-9b11db33e397" (UID: "03242a06-2186-4f5d-9f1b-9b11db33e397"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:22:41 crc kubenswrapper[4869]: I1001 15:22:41.149644 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4h6v7\" (UniqueName: \"kubernetes.io/projected/03242a06-2186-4f5d-9f1b-9b11db33e397-kube-api-access-4h6v7\") on node \"crc\" DevicePath \"\"" Oct 01 15:22:41 crc kubenswrapper[4869]: I1001 15:22:41.149681 4869 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/03242a06-2186-4f5d-9f1b-9b11db33e397-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 15:22:41 crc kubenswrapper[4869]: I1001 15:22:41.149693 4869 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/03242a06-2186-4f5d-9f1b-9b11db33e397-scripts\") on node \"crc\" DevicePath \"\"" Oct 01 15:22:41 crc kubenswrapper[4869]: I1001 15:22:41.149706 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/03242a06-2186-4f5d-9f1b-9b11db33e397-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 15:22:41 crc kubenswrapper[4869]: I1001 15:22:41.571935 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-zhzd5" event={"ID":"03242a06-2186-4f5d-9f1b-9b11db33e397","Type":"ContainerDied","Data":"3e6c72ffc2891c72f9efb0b53ed88e46c85ecb69ffcaf28a3f849e5f7b9a70b3"} Oct 01 15:22:41 crc kubenswrapper[4869]: I1001 15:22:41.571993 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3e6c72ffc2891c72f9efb0b53ed88e46c85ecb69ffcaf28a3f849e5f7b9a70b3" Oct 01 15:22:41 crc kubenswrapper[4869]: I1001 15:22:41.572452 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-zhzd5" Oct 01 15:22:41 crc kubenswrapper[4869]: I1001 15:22:41.772773 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 01 15:22:41 crc kubenswrapper[4869]: I1001 15:22:41.773501 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="e191eafa-7a88-4bf2-baef-434b83941630" containerName="nova-api-log" containerID="cri-o://f5a29ec0736db0777190c3e98539cd8e8611a9cf68a8070d7d7d3c794c974a64" gracePeriod=30 Oct 01 15:22:41 crc kubenswrapper[4869]: I1001 15:22:41.773586 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="e191eafa-7a88-4bf2-baef-434b83941630" containerName="nova-api-api" containerID="cri-o://229a38763592d665ca81e0e486b8dbb49692b888965e126f831d4c9037f071c8" gracePeriod=30 Oct 01 15:22:41 crc kubenswrapper[4869]: I1001 15:22:41.780897 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Oct 01 15:22:41 crc kubenswrapper[4869]: I1001 15:22:41.781150 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="0c777f8b-6eec-48f7-97a6-137936bfb76c" containerName="nova-scheduler-scheduler" containerID="cri-o://80234bd86f4f625f7e0c5d77c5f34b4e2bb1be316c4d95d9b98c256b6f845c54" gracePeriod=30 Oct 01 15:22:41 crc kubenswrapper[4869]: I1001 15:22:41.800531 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 01 15:22:41 crc kubenswrapper[4869]: I1001 15:22:41.801048 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="38137ffc-97e2-4517-ac14-42bfc87df875" containerName="nova-metadata-log" containerID="cri-o://1000a8aaedaaf024cc396a8c5f36dd0099d653e970e541963855eab7a6db8ecf" gracePeriod=30 Oct 01 15:22:41 crc kubenswrapper[4869]: I1001 15:22:41.801423 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="38137ffc-97e2-4517-ac14-42bfc87df875" containerName="nova-metadata-metadata" containerID="cri-o://6c1a4147750e3cb22d93d1cdad0d418617d251192c68dcfcc455e4556ee1e139" gracePeriod=30 Oct 01 15:22:42 crc kubenswrapper[4869]: I1001 15:22:42.477060 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 01 15:22:42 crc kubenswrapper[4869]: I1001 15:22:42.576713 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e191eafa-7a88-4bf2-baef-434b83941630-combined-ca-bundle\") pod \"e191eafa-7a88-4bf2-baef-434b83941630\" (UID: \"e191eafa-7a88-4bf2-baef-434b83941630\") " Oct 01 15:22:42 crc kubenswrapper[4869]: I1001 15:22:42.576777 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e191eafa-7a88-4bf2-baef-434b83941630-internal-tls-certs\") pod \"e191eafa-7a88-4bf2-baef-434b83941630\" (UID: \"e191eafa-7a88-4bf2-baef-434b83941630\") " Oct 01 15:22:42 crc kubenswrapper[4869]: I1001 15:22:42.576863 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e191eafa-7a88-4bf2-baef-434b83941630-public-tls-certs\") pod \"e191eafa-7a88-4bf2-baef-434b83941630\" (UID: \"e191eafa-7a88-4bf2-baef-434b83941630\") " Oct 01 15:22:42 crc kubenswrapper[4869]: I1001 15:22:42.576919 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e191eafa-7a88-4bf2-baef-434b83941630-config-data\") pod \"e191eafa-7a88-4bf2-baef-434b83941630\" (UID: \"e191eafa-7a88-4bf2-baef-434b83941630\") " Oct 01 15:22:42 crc kubenswrapper[4869]: I1001 15:22:42.576980 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e191eafa-7a88-4bf2-baef-434b83941630-logs\") pod \"e191eafa-7a88-4bf2-baef-434b83941630\" (UID: \"e191eafa-7a88-4bf2-baef-434b83941630\") " Oct 01 15:22:42 crc kubenswrapper[4869]: I1001 15:22:42.577019 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs97n\" (UniqueName: \"kubernetes.io/projected/e191eafa-7a88-4bf2-baef-434b83941630-kube-api-access-qs97n\") pod \"e191eafa-7a88-4bf2-baef-434b83941630\" (UID: \"e191eafa-7a88-4bf2-baef-434b83941630\") " Oct 01 15:22:42 crc kubenswrapper[4869]: I1001 15:22:42.579111 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e191eafa-7a88-4bf2-baef-434b83941630-logs" (OuterVolumeSpecName: "logs") pod "e191eafa-7a88-4bf2-baef-434b83941630" (UID: "e191eafa-7a88-4bf2-baef-434b83941630"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 15:22:42 crc kubenswrapper[4869]: I1001 15:22:42.582554 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e191eafa-7a88-4bf2-baef-434b83941630-kube-api-access-qs97n" (OuterVolumeSpecName: "kube-api-access-qs97n") pod "e191eafa-7a88-4bf2-baef-434b83941630" (UID: "e191eafa-7a88-4bf2-baef-434b83941630"). InnerVolumeSpecName "kube-api-access-qs97n". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:22:42 crc kubenswrapper[4869]: I1001 15:22:42.603811 4869 generic.go:334] "Generic (PLEG): container finished" podID="e191eafa-7a88-4bf2-baef-434b83941630" containerID="229a38763592d665ca81e0e486b8dbb49692b888965e126f831d4c9037f071c8" exitCode=0 Oct 01 15:22:42 crc kubenswrapper[4869]: I1001 15:22:42.603856 4869 generic.go:334] "Generic (PLEG): container finished" podID="e191eafa-7a88-4bf2-baef-434b83941630" containerID="f5a29ec0736db0777190c3e98539cd8e8611a9cf68a8070d7d7d3c794c974a64" exitCode=143 Oct 01 15:22:42 crc kubenswrapper[4869]: I1001 15:22:42.603907 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 01 15:22:42 crc kubenswrapper[4869]: I1001 15:22:42.603940 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"e191eafa-7a88-4bf2-baef-434b83941630","Type":"ContainerDied","Data":"229a38763592d665ca81e0e486b8dbb49692b888965e126f831d4c9037f071c8"} Oct 01 15:22:42 crc kubenswrapper[4869]: I1001 15:22:42.603967 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"e191eafa-7a88-4bf2-baef-434b83941630","Type":"ContainerDied","Data":"f5a29ec0736db0777190c3e98539cd8e8611a9cf68a8070d7d7d3c794c974a64"} Oct 01 15:22:42 crc kubenswrapper[4869]: I1001 15:22:42.603978 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"e191eafa-7a88-4bf2-baef-434b83941630","Type":"ContainerDied","Data":"fc038caf65210914cc80790844d0a8d8f7ad1404c6c261967939d96b226ff8fd"} Oct 01 15:22:42 crc kubenswrapper[4869]: I1001 15:22:42.603994 4869 scope.go:117] "RemoveContainer" containerID="229a38763592d665ca81e0e486b8dbb49692b888965e126f831d4c9037f071c8" Oct 01 15:22:42 crc kubenswrapper[4869]: I1001 15:22:42.610240 4869 generic.go:334] "Generic (PLEG): container finished" podID="38137ffc-97e2-4517-ac14-42bfc87df875" containerID="1000a8aaedaaf024cc396a8c5f36dd0099d653e970e541963855eab7a6db8ecf" exitCode=143 Oct 01 15:22:42 crc kubenswrapper[4869]: I1001 15:22:42.610326 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"38137ffc-97e2-4517-ac14-42bfc87df875","Type":"ContainerDied","Data":"1000a8aaedaaf024cc396a8c5f36dd0099d653e970e541963855eab7a6db8ecf"} Oct 01 15:22:42 crc kubenswrapper[4869]: I1001 15:22:42.623723 4869 generic.go:334] "Generic (PLEG): container finished" podID="0c777f8b-6eec-48f7-97a6-137936bfb76c" containerID="80234bd86f4f625f7e0c5d77c5f34b4e2bb1be316c4d95d9b98c256b6f845c54" exitCode=0 Oct 01 15:22:42 crc kubenswrapper[4869]: I1001 15:22:42.623779 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"0c777f8b-6eec-48f7-97a6-137936bfb76c","Type":"ContainerDied","Data":"80234bd86f4f625f7e0c5d77c5f34b4e2bb1be316c4d95d9b98c256b6f845c54"} Oct 01 15:22:42 crc kubenswrapper[4869]: I1001 15:22:42.643720 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e191eafa-7a88-4bf2-baef-434b83941630-config-data" (OuterVolumeSpecName: "config-data") pod "e191eafa-7a88-4bf2-baef-434b83941630" (UID: "e191eafa-7a88-4bf2-baef-434b83941630"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:22:42 crc kubenswrapper[4869]: I1001 15:22:42.653421 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e191eafa-7a88-4bf2-baef-434b83941630-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e191eafa-7a88-4bf2-baef-434b83941630" (UID: "e191eafa-7a88-4bf2-baef-434b83941630"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:22:42 crc kubenswrapper[4869]: I1001 15:22:42.658587 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e191eafa-7a88-4bf2-baef-434b83941630-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "e191eafa-7a88-4bf2-baef-434b83941630" (UID: "e191eafa-7a88-4bf2-baef-434b83941630"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:22:42 crc kubenswrapper[4869]: I1001 15:22:42.668318 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e191eafa-7a88-4bf2-baef-434b83941630-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "e191eafa-7a88-4bf2-baef-434b83941630" (UID: "e191eafa-7a88-4bf2-baef-434b83941630"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:22:42 crc kubenswrapper[4869]: I1001 15:22:42.675787 4869 scope.go:117] "RemoveContainer" containerID="f5a29ec0736db0777190c3e98539cd8e8611a9cf68a8070d7d7d3c794c974a64" Oct 01 15:22:42 crc kubenswrapper[4869]: I1001 15:22:42.679175 4869 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e191eafa-7a88-4bf2-baef-434b83941630-public-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 01 15:22:42 crc kubenswrapper[4869]: I1001 15:22:42.679212 4869 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e191eafa-7a88-4bf2-baef-434b83941630-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 15:22:42 crc kubenswrapper[4869]: I1001 15:22:42.679227 4869 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e191eafa-7a88-4bf2-baef-434b83941630-logs\") on node \"crc\" DevicePath \"\"" Oct 01 15:22:42 crc kubenswrapper[4869]: I1001 15:22:42.679240 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs97n\" (UniqueName: \"kubernetes.io/projected/e191eafa-7a88-4bf2-baef-434b83941630-kube-api-access-qs97n\") on node \"crc\" DevicePath \"\"" Oct 01 15:22:42 crc kubenswrapper[4869]: I1001 15:22:42.679256 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e191eafa-7a88-4bf2-baef-434b83941630-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 15:22:42 crc kubenswrapper[4869]: I1001 15:22:42.679363 4869 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e191eafa-7a88-4bf2-baef-434b83941630-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 01 15:22:42 crc kubenswrapper[4869]: I1001 15:22:42.700607 4869 scope.go:117] "RemoveContainer" containerID="229a38763592d665ca81e0e486b8dbb49692b888965e126f831d4c9037f071c8" Oct 01 15:22:42 crc kubenswrapper[4869]: E1001 15:22:42.701141 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"229a38763592d665ca81e0e486b8dbb49692b888965e126f831d4c9037f071c8\": container with ID starting with 229a38763592d665ca81e0e486b8dbb49692b888965e126f831d4c9037f071c8 not found: ID does not exist" containerID="229a38763592d665ca81e0e486b8dbb49692b888965e126f831d4c9037f071c8" Oct 01 15:22:42 crc kubenswrapper[4869]: I1001 15:22:42.701177 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"229a38763592d665ca81e0e486b8dbb49692b888965e126f831d4c9037f071c8"} err="failed to get container status \"229a38763592d665ca81e0e486b8dbb49692b888965e126f831d4c9037f071c8\": rpc error: code = NotFound desc = could not find container \"229a38763592d665ca81e0e486b8dbb49692b888965e126f831d4c9037f071c8\": container with ID starting with 229a38763592d665ca81e0e486b8dbb49692b888965e126f831d4c9037f071c8 not found: ID does not exist" Oct 01 15:22:42 crc kubenswrapper[4869]: I1001 15:22:42.701230 4869 scope.go:117] "RemoveContainer" containerID="f5a29ec0736db0777190c3e98539cd8e8611a9cf68a8070d7d7d3c794c974a64" Oct 01 15:22:42 crc kubenswrapper[4869]: E1001 15:22:42.701729 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f5a29ec0736db0777190c3e98539cd8e8611a9cf68a8070d7d7d3c794c974a64\": container with ID starting with f5a29ec0736db0777190c3e98539cd8e8611a9cf68a8070d7d7d3c794c974a64 not found: ID does not exist" containerID="f5a29ec0736db0777190c3e98539cd8e8611a9cf68a8070d7d7d3c794c974a64" Oct 01 15:22:42 crc kubenswrapper[4869]: I1001 15:22:42.701785 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f5a29ec0736db0777190c3e98539cd8e8611a9cf68a8070d7d7d3c794c974a64"} err="failed to get container status \"f5a29ec0736db0777190c3e98539cd8e8611a9cf68a8070d7d7d3c794c974a64\": rpc error: code = NotFound desc = could not find container \"f5a29ec0736db0777190c3e98539cd8e8611a9cf68a8070d7d7d3c794c974a64\": container with ID starting with f5a29ec0736db0777190c3e98539cd8e8611a9cf68a8070d7d7d3c794c974a64 not found: ID does not exist" Oct 01 15:22:42 crc kubenswrapper[4869]: I1001 15:22:42.701821 4869 scope.go:117] "RemoveContainer" containerID="229a38763592d665ca81e0e486b8dbb49692b888965e126f831d4c9037f071c8" Oct 01 15:22:42 crc kubenswrapper[4869]: I1001 15:22:42.702097 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"229a38763592d665ca81e0e486b8dbb49692b888965e126f831d4c9037f071c8"} err="failed to get container status \"229a38763592d665ca81e0e486b8dbb49692b888965e126f831d4c9037f071c8\": rpc error: code = NotFound desc = could not find container \"229a38763592d665ca81e0e486b8dbb49692b888965e126f831d4c9037f071c8\": container with ID starting with 229a38763592d665ca81e0e486b8dbb49692b888965e126f831d4c9037f071c8 not found: ID does not exist" Oct 01 15:22:42 crc kubenswrapper[4869]: I1001 15:22:42.702120 4869 scope.go:117] "RemoveContainer" containerID="f5a29ec0736db0777190c3e98539cd8e8611a9cf68a8070d7d7d3c794c974a64" Oct 01 15:22:42 crc kubenswrapper[4869]: I1001 15:22:42.702418 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f5a29ec0736db0777190c3e98539cd8e8611a9cf68a8070d7d7d3c794c974a64"} err="failed to get container status \"f5a29ec0736db0777190c3e98539cd8e8611a9cf68a8070d7d7d3c794c974a64\": rpc error: code = NotFound desc = could not find container \"f5a29ec0736db0777190c3e98539cd8e8611a9cf68a8070d7d7d3c794c974a64\": container with ID starting with 
f5a29ec0736db0777190c3e98539cd8e8611a9cf68a8070d7d7d3c794c974a64 not found: ID does not exist" Oct 01 15:22:42 crc kubenswrapper[4869]: I1001 15:22:42.712146 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 01 15:22:42 crc kubenswrapper[4869]: I1001 15:22:42.881665 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pmnmq\" (UniqueName: \"kubernetes.io/projected/0c777f8b-6eec-48f7-97a6-137936bfb76c-kube-api-access-pmnmq\") pod \"0c777f8b-6eec-48f7-97a6-137936bfb76c\" (UID: \"0c777f8b-6eec-48f7-97a6-137936bfb76c\") " Oct 01 15:22:42 crc kubenswrapper[4869]: I1001 15:22:42.881729 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0c777f8b-6eec-48f7-97a6-137936bfb76c-config-data\") pod \"0c777f8b-6eec-48f7-97a6-137936bfb76c\" (UID: \"0c777f8b-6eec-48f7-97a6-137936bfb76c\") " Oct 01 15:22:42 crc kubenswrapper[4869]: I1001 15:22:42.881762 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0c777f8b-6eec-48f7-97a6-137936bfb76c-combined-ca-bundle\") pod \"0c777f8b-6eec-48f7-97a6-137936bfb76c\" (UID: \"0c777f8b-6eec-48f7-97a6-137936bfb76c\") " Oct 01 15:22:42 crc kubenswrapper[4869]: I1001 15:22:42.884788 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0c777f8b-6eec-48f7-97a6-137936bfb76c-kube-api-access-pmnmq" (OuterVolumeSpecName: "kube-api-access-pmnmq") pod "0c777f8b-6eec-48f7-97a6-137936bfb76c" (UID: "0c777f8b-6eec-48f7-97a6-137936bfb76c"). InnerVolumeSpecName "kube-api-access-pmnmq". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:22:42 crc kubenswrapper[4869]: I1001 15:22:42.908205 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0c777f8b-6eec-48f7-97a6-137936bfb76c-config-data" (OuterVolumeSpecName: "config-data") pod "0c777f8b-6eec-48f7-97a6-137936bfb76c" (UID: "0c777f8b-6eec-48f7-97a6-137936bfb76c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:22:42 crc kubenswrapper[4869]: I1001 15:22:42.918908 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0c777f8b-6eec-48f7-97a6-137936bfb76c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0c777f8b-6eec-48f7-97a6-137936bfb76c" (UID: "0c777f8b-6eec-48f7-97a6-137936bfb76c"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:22:42 crc kubenswrapper[4869]: I1001 15:22:42.984614 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pmnmq\" (UniqueName: \"kubernetes.io/projected/0c777f8b-6eec-48f7-97a6-137936bfb76c-kube-api-access-pmnmq\") on node \"crc\" DevicePath \"\"" Oct 01 15:22:42 crc kubenswrapper[4869]: I1001 15:22:42.984644 4869 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0c777f8b-6eec-48f7-97a6-137936bfb76c-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 15:22:42 crc kubenswrapper[4869]: I1001 15:22:42.984656 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0c777f8b-6eec-48f7-97a6-137936bfb76c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 15:22:43 crc kubenswrapper[4869]: I1001 15:22:43.012886 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 01 15:22:43 crc kubenswrapper[4869]: I1001 15:22:43.027402 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Oct 01 15:22:43 crc kubenswrapper[4869]: I1001 15:22:43.038682 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Oct 01 15:22:43 crc kubenswrapper[4869]: E1001 15:22:43.039117 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54903b9a-2e49-43f1-9989-ff8d13276fe7" containerName="init" Oct 01 15:22:43 crc kubenswrapper[4869]: I1001 15:22:43.039135 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="54903b9a-2e49-43f1-9989-ff8d13276fe7" containerName="init" Oct 01 15:22:43 crc kubenswrapper[4869]: E1001 15:22:43.039152 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03242a06-2186-4f5d-9f1b-9b11db33e397" containerName="nova-manage" Oct 01 15:22:43 crc kubenswrapper[4869]: I1001 15:22:43.039161 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="03242a06-2186-4f5d-9f1b-9b11db33e397" containerName="nova-manage" Oct 01 15:22:43 crc kubenswrapper[4869]: E1001 15:22:43.039183 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e191eafa-7a88-4bf2-baef-434b83941630" containerName="nova-api-api" Oct 01 15:22:43 crc kubenswrapper[4869]: I1001 15:22:43.039191 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="e191eafa-7a88-4bf2-baef-434b83941630" containerName="nova-api-api" Oct 01 15:22:43 crc kubenswrapper[4869]: E1001 15:22:43.039216 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54903b9a-2e49-43f1-9989-ff8d13276fe7" containerName="dnsmasq-dns" Oct 01 15:22:43 crc kubenswrapper[4869]: I1001 15:22:43.039224 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="54903b9a-2e49-43f1-9989-ff8d13276fe7" containerName="dnsmasq-dns" Oct 01 15:22:43 crc kubenswrapper[4869]: E1001 15:22:43.039242 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e191eafa-7a88-4bf2-baef-434b83941630" containerName="nova-api-log" Oct 01 15:22:43 crc kubenswrapper[4869]: I1001 15:22:43.039249 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="e191eafa-7a88-4bf2-baef-434b83941630" containerName="nova-api-log" Oct 01 15:22:43 crc kubenswrapper[4869]: E1001 15:22:43.039295 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0c777f8b-6eec-48f7-97a6-137936bfb76c" containerName="nova-scheduler-scheduler" Oct 01 15:22:43 crc kubenswrapper[4869]: I1001 15:22:43.039303 4869 state_mem.go:107] "Deleted CPUSet 
assignment" podUID="0c777f8b-6eec-48f7-97a6-137936bfb76c" containerName="nova-scheduler-scheduler" Oct 01 15:22:43 crc kubenswrapper[4869]: I1001 15:22:43.039494 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="e191eafa-7a88-4bf2-baef-434b83941630" containerName="nova-api-api" Oct 01 15:22:43 crc kubenswrapper[4869]: I1001 15:22:43.039509 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="54903b9a-2e49-43f1-9989-ff8d13276fe7" containerName="dnsmasq-dns" Oct 01 15:22:43 crc kubenswrapper[4869]: I1001 15:22:43.039529 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="03242a06-2186-4f5d-9f1b-9b11db33e397" containerName="nova-manage" Oct 01 15:22:43 crc kubenswrapper[4869]: I1001 15:22:43.039546 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="0c777f8b-6eec-48f7-97a6-137936bfb76c" containerName="nova-scheduler-scheduler" Oct 01 15:22:43 crc kubenswrapper[4869]: I1001 15:22:43.039562 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="e191eafa-7a88-4bf2-baef-434b83941630" containerName="nova-api-log" Oct 01 15:22:43 crc kubenswrapper[4869]: I1001 15:22:43.040813 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 01 15:22:43 crc kubenswrapper[4869]: I1001 15:22:43.053065 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Oct 01 15:22:43 crc kubenswrapper[4869]: I1001 15:22:43.053376 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Oct 01 15:22:43 crc kubenswrapper[4869]: I1001 15:22:43.053523 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Oct 01 15:22:43 crc kubenswrapper[4869]: I1001 15:22:43.058078 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 01 15:22:43 crc kubenswrapper[4869]: I1001 15:22:43.188356 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2eb2029a-beab-4fec-8235-eb5cdfd2ff1a-public-tls-certs\") pod \"nova-api-0\" (UID: \"2eb2029a-beab-4fec-8235-eb5cdfd2ff1a\") " pod="openstack/nova-api-0" Oct 01 15:22:43 crc kubenswrapper[4869]: I1001 15:22:43.188417 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2eb2029a-beab-4fec-8235-eb5cdfd2ff1a-internal-tls-certs\") pod \"nova-api-0\" (UID: \"2eb2029a-beab-4fec-8235-eb5cdfd2ff1a\") " pod="openstack/nova-api-0" Oct 01 15:22:43 crc kubenswrapper[4869]: I1001 15:22:43.188441 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2eb2029a-beab-4fec-8235-eb5cdfd2ff1a-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"2eb2029a-beab-4fec-8235-eb5cdfd2ff1a\") " pod="openstack/nova-api-0" Oct 01 15:22:43 crc kubenswrapper[4869]: I1001 15:22:43.188633 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h5xhf\" (UniqueName: \"kubernetes.io/projected/2eb2029a-beab-4fec-8235-eb5cdfd2ff1a-kube-api-access-h5xhf\") pod \"nova-api-0\" (UID: \"2eb2029a-beab-4fec-8235-eb5cdfd2ff1a\") " pod="openstack/nova-api-0" Oct 01 15:22:43 crc kubenswrapper[4869]: I1001 15:22:43.188739 4869 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2eb2029a-beab-4fec-8235-eb5cdfd2ff1a-logs\") pod \"nova-api-0\" (UID: \"2eb2029a-beab-4fec-8235-eb5cdfd2ff1a\") " pod="openstack/nova-api-0" Oct 01 15:22:43 crc kubenswrapper[4869]: I1001 15:22:43.189004 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2eb2029a-beab-4fec-8235-eb5cdfd2ff1a-config-data\") pod \"nova-api-0\" (UID: \"2eb2029a-beab-4fec-8235-eb5cdfd2ff1a\") " pod="openstack/nova-api-0" Oct 01 15:22:43 crc kubenswrapper[4869]: I1001 15:22:43.290479 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2eb2029a-beab-4fec-8235-eb5cdfd2ff1a-public-tls-certs\") pod \"nova-api-0\" (UID: \"2eb2029a-beab-4fec-8235-eb5cdfd2ff1a\") " pod="openstack/nova-api-0" Oct 01 15:22:43 crc kubenswrapper[4869]: I1001 15:22:43.290548 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2eb2029a-beab-4fec-8235-eb5cdfd2ff1a-internal-tls-certs\") pod \"nova-api-0\" (UID: \"2eb2029a-beab-4fec-8235-eb5cdfd2ff1a\") " pod="openstack/nova-api-0" Oct 01 15:22:43 crc kubenswrapper[4869]: I1001 15:22:43.290571 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2eb2029a-beab-4fec-8235-eb5cdfd2ff1a-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"2eb2029a-beab-4fec-8235-eb5cdfd2ff1a\") " pod="openstack/nova-api-0" Oct 01 15:22:43 crc kubenswrapper[4869]: I1001 15:22:43.290623 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h5xhf\" (UniqueName: \"kubernetes.io/projected/2eb2029a-beab-4fec-8235-eb5cdfd2ff1a-kube-api-access-h5xhf\") pod \"nova-api-0\" (UID: \"2eb2029a-beab-4fec-8235-eb5cdfd2ff1a\") " pod="openstack/nova-api-0" Oct 01 15:22:43 crc kubenswrapper[4869]: I1001 15:22:43.290647 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2eb2029a-beab-4fec-8235-eb5cdfd2ff1a-logs\") pod \"nova-api-0\" (UID: \"2eb2029a-beab-4fec-8235-eb5cdfd2ff1a\") " pod="openstack/nova-api-0" Oct 01 15:22:43 crc kubenswrapper[4869]: I1001 15:22:43.290732 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2eb2029a-beab-4fec-8235-eb5cdfd2ff1a-config-data\") pod \"nova-api-0\" (UID: \"2eb2029a-beab-4fec-8235-eb5cdfd2ff1a\") " pod="openstack/nova-api-0" Oct 01 15:22:43 crc kubenswrapper[4869]: I1001 15:22:43.292025 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2eb2029a-beab-4fec-8235-eb5cdfd2ff1a-logs\") pod \"nova-api-0\" (UID: \"2eb2029a-beab-4fec-8235-eb5cdfd2ff1a\") " pod="openstack/nova-api-0" Oct 01 15:22:43 crc kubenswrapper[4869]: I1001 15:22:43.296456 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2eb2029a-beab-4fec-8235-eb5cdfd2ff1a-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"2eb2029a-beab-4fec-8235-eb5cdfd2ff1a\") " pod="openstack/nova-api-0" Oct 01 15:22:43 crc kubenswrapper[4869]: I1001 15:22:43.297556 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"config-data\" (UniqueName: \"kubernetes.io/secret/2eb2029a-beab-4fec-8235-eb5cdfd2ff1a-config-data\") pod \"nova-api-0\" (UID: \"2eb2029a-beab-4fec-8235-eb5cdfd2ff1a\") " pod="openstack/nova-api-0" Oct 01 15:22:43 crc kubenswrapper[4869]: I1001 15:22:43.298410 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2eb2029a-beab-4fec-8235-eb5cdfd2ff1a-public-tls-certs\") pod \"nova-api-0\" (UID: \"2eb2029a-beab-4fec-8235-eb5cdfd2ff1a\") " pod="openstack/nova-api-0" Oct 01 15:22:43 crc kubenswrapper[4869]: I1001 15:22:43.310074 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2eb2029a-beab-4fec-8235-eb5cdfd2ff1a-internal-tls-certs\") pod \"nova-api-0\" (UID: \"2eb2029a-beab-4fec-8235-eb5cdfd2ff1a\") " pod="openstack/nova-api-0" Oct 01 15:22:43 crc kubenswrapper[4869]: I1001 15:22:43.317101 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h5xhf\" (UniqueName: \"kubernetes.io/projected/2eb2029a-beab-4fec-8235-eb5cdfd2ff1a-kube-api-access-h5xhf\") pod \"nova-api-0\" (UID: \"2eb2029a-beab-4fec-8235-eb5cdfd2ff1a\") " pod="openstack/nova-api-0" Oct 01 15:22:43 crc kubenswrapper[4869]: I1001 15:22:43.354411 4869 patch_prober.go:28] interesting pod/machine-config-daemon-c86m8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 15:22:43 crc kubenswrapper[4869]: I1001 15:22:43.354492 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 15:22:43 crc kubenswrapper[4869]: I1001 15:22:43.383681 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 01 15:22:43 crc kubenswrapper[4869]: I1001 15:22:43.592947 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e191eafa-7a88-4bf2-baef-434b83941630" path="/var/lib/kubelet/pods/e191eafa-7a88-4bf2-baef-434b83941630/volumes" Oct 01 15:22:43 crc kubenswrapper[4869]: I1001 15:22:43.640077 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Oct 01 15:22:43 crc kubenswrapper[4869]: I1001 15:22:43.640793 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"0c777f8b-6eec-48f7-97a6-137936bfb76c","Type":"ContainerDied","Data":"57cf95c75a950531e9fd39a60f9c53837b8484e5e0416a2750dd7d099aed4e0c"} Oct 01 15:22:43 crc kubenswrapper[4869]: I1001 15:22:43.640834 4869 scope.go:117] "RemoveContainer" containerID="80234bd86f4f625f7e0c5d77c5f34b4e2bb1be316c4d95d9b98c256b6f845c54" Oct 01 15:22:43 crc kubenswrapper[4869]: I1001 15:22:43.675639 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Oct 01 15:22:43 crc kubenswrapper[4869]: I1001 15:22:43.686795 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Oct 01 15:22:43 crc kubenswrapper[4869]: I1001 15:22:43.699009 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Oct 01 15:22:43 crc kubenswrapper[4869]: I1001 15:22:43.700594 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 01 15:22:43 crc kubenswrapper[4869]: I1001 15:22:43.703703 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Oct 01 15:22:43 crc kubenswrapper[4869]: I1001 15:22:43.713024 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 01 15:22:43 crc kubenswrapper[4869]: I1001 15:22:43.799993 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f1cbcc35-99b2-4580-80c2-727dc1cb96d7-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"f1cbcc35-99b2-4580-80c2-727dc1cb96d7\") " pod="openstack/nova-scheduler-0" Oct 01 15:22:43 crc kubenswrapper[4869]: I1001 15:22:43.800037 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f1cbcc35-99b2-4580-80c2-727dc1cb96d7-config-data\") pod \"nova-scheduler-0\" (UID: \"f1cbcc35-99b2-4580-80c2-727dc1cb96d7\") " pod="openstack/nova-scheduler-0" Oct 01 15:22:43 crc kubenswrapper[4869]: I1001 15:22:43.800076 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b455z\" (UniqueName: \"kubernetes.io/projected/f1cbcc35-99b2-4580-80c2-727dc1cb96d7-kube-api-access-b455z\") pod \"nova-scheduler-0\" (UID: \"f1cbcc35-99b2-4580-80c2-727dc1cb96d7\") " pod="openstack/nova-scheduler-0" Oct 01 15:22:43 crc kubenswrapper[4869]: I1001 15:22:43.835146 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 01 15:22:43 crc kubenswrapper[4869]: W1001 15:22:43.837066 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2eb2029a_beab_4fec_8235_eb5cdfd2ff1a.slice/crio-bf0b65a1b9a60d25c02c3809563ef5ff64d890c2a72e14f637cf41fe902044d0 WatchSource:0}: Error finding container bf0b65a1b9a60d25c02c3809563ef5ff64d890c2a72e14f637cf41fe902044d0: Status 404 returned error can't find the container with id bf0b65a1b9a60d25c02c3809563ef5ff64d890c2a72e14f637cf41fe902044d0 Oct 01 15:22:43 crc kubenswrapper[4869]: I1001 15:22:43.904799 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/f1cbcc35-99b2-4580-80c2-727dc1cb96d7-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"f1cbcc35-99b2-4580-80c2-727dc1cb96d7\") " pod="openstack/nova-scheduler-0" Oct 01 15:22:43 crc kubenswrapper[4869]: I1001 15:22:43.904879 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f1cbcc35-99b2-4580-80c2-727dc1cb96d7-config-data\") pod \"nova-scheduler-0\" (UID: \"f1cbcc35-99b2-4580-80c2-727dc1cb96d7\") " pod="openstack/nova-scheduler-0" Oct 01 15:22:43 crc kubenswrapper[4869]: I1001 15:22:43.904952 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b455z\" (UniqueName: \"kubernetes.io/projected/f1cbcc35-99b2-4580-80c2-727dc1cb96d7-kube-api-access-b455z\") pod \"nova-scheduler-0\" (UID: \"f1cbcc35-99b2-4580-80c2-727dc1cb96d7\") " pod="openstack/nova-scheduler-0" Oct 01 15:22:43 crc kubenswrapper[4869]: I1001 15:22:43.913509 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f1cbcc35-99b2-4580-80c2-727dc1cb96d7-config-data\") pod \"nova-scheduler-0\" (UID: \"f1cbcc35-99b2-4580-80c2-727dc1cb96d7\") " pod="openstack/nova-scheduler-0" Oct 01 15:22:43 crc kubenswrapper[4869]: I1001 15:22:43.915152 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f1cbcc35-99b2-4580-80c2-727dc1cb96d7-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"f1cbcc35-99b2-4580-80c2-727dc1cb96d7\") " pod="openstack/nova-scheduler-0" Oct 01 15:22:43 crc kubenswrapper[4869]: I1001 15:22:43.926991 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b455z\" (UniqueName: \"kubernetes.io/projected/f1cbcc35-99b2-4580-80c2-727dc1cb96d7-kube-api-access-b455z\") pod \"nova-scheduler-0\" (UID: \"f1cbcc35-99b2-4580-80c2-727dc1cb96d7\") " pod="openstack/nova-scheduler-0" Oct 01 15:22:44 crc kubenswrapper[4869]: I1001 15:22:44.023684 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Oct 01 15:22:44 crc kubenswrapper[4869]: I1001 15:22:44.471859 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 01 15:22:44 crc kubenswrapper[4869]: W1001 15:22:44.474287 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf1cbcc35_99b2_4580_80c2_727dc1cb96d7.slice/crio-58a0b43c904d5348fa817cf640c0d1aee92880133f841fdff64c099294ef0838 WatchSource:0}: Error finding container 58a0b43c904d5348fa817cf640c0d1aee92880133f841fdff64c099294ef0838: Status 404 returned error can't find the container with id 58a0b43c904d5348fa817cf640c0d1aee92880133f841fdff64c099294ef0838 Oct 01 15:22:44 crc kubenswrapper[4869]: I1001 15:22:44.657520 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"f1cbcc35-99b2-4580-80c2-727dc1cb96d7","Type":"ContainerStarted","Data":"58a0b43c904d5348fa817cf640c0d1aee92880133f841fdff64c099294ef0838"} Oct 01 15:22:44 crc kubenswrapper[4869]: I1001 15:22:44.660701 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"2eb2029a-beab-4fec-8235-eb5cdfd2ff1a","Type":"ContainerStarted","Data":"bf0b65a1b9a60d25c02c3809563ef5ff64d890c2a72e14f637cf41fe902044d0"} Oct 01 15:22:44 crc kubenswrapper[4869]: I1001 15:22:44.967307 4869 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="38137ffc-97e2-4517-ac14-42bfc87df875" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.182:8775/\": read tcp 10.217.0.2:41786->10.217.0.182:8775: read: connection reset by peer" Oct 01 15:22:44 crc kubenswrapper[4869]: I1001 15:22:44.967319 4869 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="38137ffc-97e2-4517-ac14-42bfc87df875" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.182:8775/\": read tcp 10.217.0.2:41798->10.217.0.182:8775: read: connection reset by peer" Oct 01 15:22:45 crc kubenswrapper[4869]: I1001 15:22:45.419048 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 01 15:22:45 crc kubenswrapper[4869]: I1001 15:22:45.464248 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/38137ffc-97e2-4517-ac14-42bfc87df875-nova-metadata-tls-certs\") pod \"38137ffc-97e2-4517-ac14-42bfc87df875\" (UID: \"38137ffc-97e2-4517-ac14-42bfc87df875\") " Oct 01 15:22:45 crc kubenswrapper[4869]: I1001 15:22:45.464325 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/38137ffc-97e2-4517-ac14-42bfc87df875-combined-ca-bundle\") pod \"38137ffc-97e2-4517-ac14-42bfc87df875\" (UID: \"38137ffc-97e2-4517-ac14-42bfc87df875\") " Oct 01 15:22:45 crc kubenswrapper[4869]: I1001 15:22:45.464377 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xjbdw\" (UniqueName: \"kubernetes.io/projected/38137ffc-97e2-4517-ac14-42bfc87df875-kube-api-access-xjbdw\") pod \"38137ffc-97e2-4517-ac14-42bfc87df875\" (UID: \"38137ffc-97e2-4517-ac14-42bfc87df875\") " Oct 01 15:22:45 crc kubenswrapper[4869]: I1001 15:22:45.464421 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/38137ffc-97e2-4517-ac14-42bfc87df875-logs\") pod \"38137ffc-97e2-4517-ac14-42bfc87df875\" (UID: \"38137ffc-97e2-4517-ac14-42bfc87df875\") " Oct 01 15:22:45 crc kubenswrapper[4869]: I1001 15:22:45.464480 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/38137ffc-97e2-4517-ac14-42bfc87df875-config-data\") pod \"38137ffc-97e2-4517-ac14-42bfc87df875\" (UID: \"38137ffc-97e2-4517-ac14-42bfc87df875\") " Oct 01 15:22:45 crc kubenswrapper[4869]: I1001 15:22:45.467359 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/38137ffc-97e2-4517-ac14-42bfc87df875-logs" (OuterVolumeSpecName: "logs") pod "38137ffc-97e2-4517-ac14-42bfc87df875" (UID: "38137ffc-97e2-4517-ac14-42bfc87df875"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 15:22:45 crc kubenswrapper[4869]: I1001 15:22:45.472744 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/38137ffc-97e2-4517-ac14-42bfc87df875-kube-api-access-xjbdw" (OuterVolumeSpecName: "kube-api-access-xjbdw") pod "38137ffc-97e2-4517-ac14-42bfc87df875" (UID: "38137ffc-97e2-4517-ac14-42bfc87df875"). InnerVolumeSpecName "kube-api-access-xjbdw". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:22:45 crc kubenswrapper[4869]: I1001 15:22:45.496855 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/38137ffc-97e2-4517-ac14-42bfc87df875-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "38137ffc-97e2-4517-ac14-42bfc87df875" (UID: "38137ffc-97e2-4517-ac14-42bfc87df875"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:22:45 crc kubenswrapper[4869]: I1001 15:22:45.500771 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/38137ffc-97e2-4517-ac14-42bfc87df875-config-data" (OuterVolumeSpecName: "config-data") pod "38137ffc-97e2-4517-ac14-42bfc87df875" (UID: "38137ffc-97e2-4517-ac14-42bfc87df875"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:22:45 crc kubenswrapper[4869]: I1001 15:22:45.526482 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/38137ffc-97e2-4517-ac14-42bfc87df875-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "38137ffc-97e2-4517-ac14-42bfc87df875" (UID: "38137ffc-97e2-4517-ac14-42bfc87df875"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:22:45 crc kubenswrapper[4869]: I1001 15:22:45.568433 4869 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/38137ffc-97e2-4517-ac14-42bfc87df875-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 15:22:45 crc kubenswrapper[4869]: I1001 15:22:45.568472 4869 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/38137ffc-97e2-4517-ac14-42bfc87df875-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 01 15:22:45 crc kubenswrapper[4869]: I1001 15:22:45.568490 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/38137ffc-97e2-4517-ac14-42bfc87df875-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 15:22:45 crc kubenswrapper[4869]: I1001 15:22:45.568503 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xjbdw\" (UniqueName: \"kubernetes.io/projected/38137ffc-97e2-4517-ac14-42bfc87df875-kube-api-access-xjbdw\") on node \"crc\" DevicePath \"\"" Oct 01 15:22:45 crc kubenswrapper[4869]: I1001 15:22:45.568514 4869 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/38137ffc-97e2-4517-ac14-42bfc87df875-logs\") on node \"crc\" DevicePath \"\"" Oct 01 15:22:45 crc kubenswrapper[4869]: I1001 15:22:45.598463 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0c777f8b-6eec-48f7-97a6-137936bfb76c" path="/var/lib/kubelet/pods/0c777f8b-6eec-48f7-97a6-137936bfb76c/volumes" Oct 01 15:22:45 crc kubenswrapper[4869]: I1001 15:22:45.675254 4869 generic.go:334] "Generic (PLEG): container finished" podID="38137ffc-97e2-4517-ac14-42bfc87df875" containerID="6c1a4147750e3cb22d93d1cdad0d418617d251192c68dcfcc455e4556ee1e139" exitCode=0 Oct 01 15:22:45 crc kubenswrapper[4869]: I1001 15:22:45.675370 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"38137ffc-97e2-4517-ac14-42bfc87df875","Type":"ContainerDied","Data":"6c1a4147750e3cb22d93d1cdad0d418617d251192c68dcfcc455e4556ee1e139"} Oct 01 15:22:45 crc kubenswrapper[4869]: I1001 15:22:45.675407 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"38137ffc-97e2-4517-ac14-42bfc87df875","Type":"ContainerDied","Data":"c9bbde8a54ec6054719650948ad4e94c0d26195fe97a2411f1d973ead3cd9d93"} Oct 01 15:22:45 crc kubenswrapper[4869]: I1001 15:22:45.675432 4869 scope.go:117] "RemoveContainer" containerID="6c1a4147750e3cb22d93d1cdad0d418617d251192c68dcfcc455e4556ee1e139" Oct 01 15:22:45 crc kubenswrapper[4869]: I1001 15:22:45.675623 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 01 15:22:45 crc kubenswrapper[4869]: I1001 15:22:45.680801 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"2eb2029a-beab-4fec-8235-eb5cdfd2ff1a","Type":"ContainerStarted","Data":"0ebb7d242e7ad67b3dd19a66761c7775b26546fa362be0642d665bc5a97e9655"} Oct 01 15:22:45 crc kubenswrapper[4869]: I1001 15:22:45.680850 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"2eb2029a-beab-4fec-8235-eb5cdfd2ff1a","Type":"ContainerStarted","Data":"6992fdaf0023bc05ebc0a08d96ad8ce9ac1b519a5f5ba96078f06cf9529ab973"} Oct 01 15:22:45 crc kubenswrapper[4869]: I1001 15:22:45.683565 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"f1cbcc35-99b2-4580-80c2-727dc1cb96d7","Type":"ContainerStarted","Data":"4886c67e63e5e1540104f6c0f57017c15aacb5f590ace96c67de95132284ad8c"} Oct 01 15:22:45 crc kubenswrapper[4869]: I1001 15:22:45.700913 4869 scope.go:117] "RemoveContainer" containerID="1000a8aaedaaf024cc396a8c5f36dd0099d653e970e541963855eab7a6db8ecf" Oct 01 15:22:45 crc kubenswrapper[4869]: I1001 15:22:45.720385 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 01 15:22:45 crc kubenswrapper[4869]: I1001 15:22:45.740603 4869 scope.go:117] "RemoveContainer" containerID="6c1a4147750e3cb22d93d1cdad0d418617d251192c68dcfcc455e4556ee1e139" Oct 01 15:22:45 crc kubenswrapper[4869]: E1001 15:22:45.741127 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6c1a4147750e3cb22d93d1cdad0d418617d251192c68dcfcc455e4556ee1e139\": container with ID starting with 6c1a4147750e3cb22d93d1cdad0d418617d251192c68dcfcc455e4556ee1e139 not found: ID does not exist" containerID="6c1a4147750e3cb22d93d1cdad0d418617d251192c68dcfcc455e4556ee1e139" Oct 01 15:22:45 crc kubenswrapper[4869]: I1001 15:22:45.741178 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6c1a4147750e3cb22d93d1cdad0d418617d251192c68dcfcc455e4556ee1e139"} err="failed to get container status \"6c1a4147750e3cb22d93d1cdad0d418617d251192c68dcfcc455e4556ee1e139\": rpc error: code = NotFound desc = could not find container \"6c1a4147750e3cb22d93d1cdad0d418617d251192c68dcfcc455e4556ee1e139\": container with ID starting with 6c1a4147750e3cb22d93d1cdad0d418617d251192c68dcfcc455e4556ee1e139 not found: ID does not exist" Oct 01 15:22:45 crc kubenswrapper[4869]: I1001 15:22:45.741201 4869 scope.go:117] "RemoveContainer" containerID="1000a8aaedaaf024cc396a8c5f36dd0099d653e970e541963855eab7a6db8ecf" Oct 01 15:22:45 crc kubenswrapper[4869]: E1001 15:22:45.741631 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1000a8aaedaaf024cc396a8c5f36dd0099d653e970e541963855eab7a6db8ecf\": container with ID starting with 1000a8aaedaaf024cc396a8c5f36dd0099d653e970e541963855eab7a6db8ecf not found: ID does not exist" containerID="1000a8aaedaaf024cc396a8c5f36dd0099d653e970e541963855eab7a6db8ecf" Oct 01 15:22:45 crc kubenswrapper[4869]: I1001 15:22:45.741658 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1000a8aaedaaf024cc396a8c5f36dd0099d653e970e541963855eab7a6db8ecf"} err="failed to get container status \"1000a8aaedaaf024cc396a8c5f36dd0099d653e970e541963855eab7a6db8ecf\": rpc error: code = NotFound desc = could not find 
container \"1000a8aaedaaf024cc396a8c5f36dd0099d653e970e541963855eab7a6db8ecf\": container with ID starting with 1000a8aaedaaf024cc396a8c5f36dd0099d653e970e541963855eab7a6db8ecf not found: ID does not exist" Oct 01 15:22:45 crc kubenswrapper[4869]: I1001 15:22:45.742335 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Oct 01 15:22:45 crc kubenswrapper[4869]: I1001 15:22:45.749674 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Oct 01 15:22:45 crc kubenswrapper[4869]: E1001 15:22:45.750010 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="38137ffc-97e2-4517-ac14-42bfc87df875" containerName="nova-metadata-log" Oct 01 15:22:45 crc kubenswrapper[4869]: I1001 15:22:45.750026 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="38137ffc-97e2-4517-ac14-42bfc87df875" containerName="nova-metadata-log" Oct 01 15:22:45 crc kubenswrapper[4869]: E1001 15:22:45.750043 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="38137ffc-97e2-4517-ac14-42bfc87df875" containerName="nova-metadata-metadata" Oct 01 15:22:45 crc kubenswrapper[4869]: I1001 15:22:45.750049 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="38137ffc-97e2-4517-ac14-42bfc87df875" containerName="nova-metadata-metadata" Oct 01 15:22:45 crc kubenswrapper[4869]: I1001 15:22:45.750214 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="38137ffc-97e2-4517-ac14-42bfc87df875" containerName="nova-metadata-metadata" Oct 01 15:22:45 crc kubenswrapper[4869]: I1001 15:22:45.750229 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="38137ffc-97e2-4517-ac14-42bfc87df875" containerName="nova-metadata-log" Oct 01 15:22:45 crc kubenswrapper[4869]: I1001 15:22:45.751066 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 01 15:22:45 crc kubenswrapper[4869]: I1001 15:22:45.752999 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Oct 01 15:22:45 crc kubenswrapper[4869]: I1001 15:22:45.754337 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.754306013 podStartE2EDuration="2.754306013s" podCreationTimestamp="2025-10-01 15:22:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:22:45.723444775 +0000 UTC m=+1074.870287911" watchObservedRunningTime="2025-10-01 15:22:45.754306013 +0000 UTC m=+1074.901149129" Oct 01 15:22:45 crc kubenswrapper[4869]: I1001 15:22:45.755811 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Oct 01 15:22:45 crc kubenswrapper[4869]: I1001 15:22:45.787139 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 01 15:22:45 crc kubenswrapper[4869]: I1001 15:22:45.793691 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.793680286 podStartE2EDuration="2.793680286s" podCreationTimestamp="2025-10-01 15:22:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:22:45.746924477 +0000 UTC m=+1074.893767593" watchObservedRunningTime="2025-10-01 15:22:45.793680286 +0000 UTC m=+1074.940523392" Oct 01 15:22:45 crc kubenswrapper[4869]: I1001 15:22:45.873281 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/448e4afc-2c6f-48b3-8a80-cd7c5bfbabc2-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"448e4afc-2c6f-48b3-8a80-cd7c5bfbabc2\") " pod="openstack/nova-metadata-0" Oct 01 15:22:45 crc kubenswrapper[4869]: I1001 15:22:45.873359 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jvn2s\" (UniqueName: \"kubernetes.io/projected/448e4afc-2c6f-48b3-8a80-cd7c5bfbabc2-kube-api-access-jvn2s\") pod \"nova-metadata-0\" (UID: \"448e4afc-2c6f-48b3-8a80-cd7c5bfbabc2\") " pod="openstack/nova-metadata-0" Oct 01 15:22:45 crc kubenswrapper[4869]: I1001 15:22:45.873434 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/448e4afc-2c6f-48b3-8a80-cd7c5bfbabc2-logs\") pod \"nova-metadata-0\" (UID: \"448e4afc-2c6f-48b3-8a80-cd7c5bfbabc2\") " pod="openstack/nova-metadata-0" Oct 01 15:22:45 crc kubenswrapper[4869]: I1001 15:22:45.873502 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/448e4afc-2c6f-48b3-8a80-cd7c5bfbabc2-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"448e4afc-2c6f-48b3-8a80-cd7c5bfbabc2\") " pod="openstack/nova-metadata-0" Oct 01 15:22:45 crc kubenswrapper[4869]: I1001 15:22:45.873565 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/448e4afc-2c6f-48b3-8a80-cd7c5bfbabc2-config-data\") pod \"nova-metadata-0\" (UID: \"448e4afc-2c6f-48b3-8a80-cd7c5bfbabc2\") " 
pod="openstack/nova-metadata-0" Oct 01 15:22:45 crc kubenswrapper[4869]: I1001 15:22:45.977823 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/448e4afc-2c6f-48b3-8a80-cd7c5bfbabc2-config-data\") pod \"nova-metadata-0\" (UID: \"448e4afc-2c6f-48b3-8a80-cd7c5bfbabc2\") " pod="openstack/nova-metadata-0" Oct 01 15:22:45 crc kubenswrapper[4869]: I1001 15:22:45.977968 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/448e4afc-2c6f-48b3-8a80-cd7c5bfbabc2-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"448e4afc-2c6f-48b3-8a80-cd7c5bfbabc2\") " pod="openstack/nova-metadata-0" Oct 01 15:22:45 crc kubenswrapper[4869]: I1001 15:22:45.978026 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jvn2s\" (UniqueName: \"kubernetes.io/projected/448e4afc-2c6f-48b3-8a80-cd7c5bfbabc2-kube-api-access-jvn2s\") pod \"nova-metadata-0\" (UID: \"448e4afc-2c6f-48b3-8a80-cd7c5bfbabc2\") " pod="openstack/nova-metadata-0" Oct 01 15:22:45 crc kubenswrapper[4869]: I1001 15:22:45.978070 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/448e4afc-2c6f-48b3-8a80-cd7c5bfbabc2-logs\") pod \"nova-metadata-0\" (UID: \"448e4afc-2c6f-48b3-8a80-cd7c5bfbabc2\") " pod="openstack/nova-metadata-0" Oct 01 15:22:45 crc kubenswrapper[4869]: I1001 15:22:45.978109 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/448e4afc-2c6f-48b3-8a80-cd7c5bfbabc2-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"448e4afc-2c6f-48b3-8a80-cd7c5bfbabc2\") " pod="openstack/nova-metadata-0" Oct 01 15:22:45 crc kubenswrapper[4869]: I1001 15:22:45.978718 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/448e4afc-2c6f-48b3-8a80-cd7c5bfbabc2-logs\") pod \"nova-metadata-0\" (UID: \"448e4afc-2c6f-48b3-8a80-cd7c5bfbabc2\") " pod="openstack/nova-metadata-0" Oct 01 15:22:45 crc kubenswrapper[4869]: I1001 15:22:45.982825 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/448e4afc-2c6f-48b3-8a80-cd7c5bfbabc2-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"448e4afc-2c6f-48b3-8a80-cd7c5bfbabc2\") " pod="openstack/nova-metadata-0" Oct 01 15:22:45 crc kubenswrapper[4869]: I1001 15:22:45.985681 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/448e4afc-2c6f-48b3-8a80-cd7c5bfbabc2-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"448e4afc-2c6f-48b3-8a80-cd7c5bfbabc2\") " pod="openstack/nova-metadata-0" Oct 01 15:22:45 crc kubenswrapper[4869]: I1001 15:22:45.985893 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/448e4afc-2c6f-48b3-8a80-cd7c5bfbabc2-config-data\") pod \"nova-metadata-0\" (UID: \"448e4afc-2c6f-48b3-8a80-cd7c5bfbabc2\") " pod="openstack/nova-metadata-0" Oct 01 15:22:46 crc kubenswrapper[4869]: I1001 15:22:46.004826 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jvn2s\" (UniqueName: \"kubernetes.io/projected/448e4afc-2c6f-48b3-8a80-cd7c5bfbabc2-kube-api-access-jvn2s\") pod \"nova-metadata-0\" 
(UID: \"448e4afc-2c6f-48b3-8a80-cd7c5bfbabc2\") " pod="openstack/nova-metadata-0" Oct 01 15:22:46 crc kubenswrapper[4869]: I1001 15:22:46.071277 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 01 15:22:46 crc kubenswrapper[4869]: I1001 15:22:46.518594 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 01 15:22:46 crc kubenswrapper[4869]: W1001 15:22:46.523518 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod448e4afc_2c6f_48b3_8a80_cd7c5bfbabc2.slice/crio-6a23c882b33a115174ea0856cb092e0118e6ae4bac45bba1a23c53e291c21a4f WatchSource:0}: Error finding container 6a23c882b33a115174ea0856cb092e0118e6ae4bac45bba1a23c53e291c21a4f: Status 404 returned error can't find the container with id 6a23c882b33a115174ea0856cb092e0118e6ae4bac45bba1a23c53e291c21a4f Oct 01 15:22:46 crc kubenswrapper[4869]: I1001 15:22:46.693620 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"448e4afc-2c6f-48b3-8a80-cd7c5bfbabc2","Type":"ContainerStarted","Data":"6a23c882b33a115174ea0856cb092e0118e6ae4bac45bba1a23c53e291c21a4f"} Oct 01 15:22:47 crc kubenswrapper[4869]: I1001 15:22:47.591967 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="38137ffc-97e2-4517-ac14-42bfc87df875" path="/var/lib/kubelet/pods/38137ffc-97e2-4517-ac14-42bfc87df875/volumes" Oct 01 15:22:47 crc kubenswrapper[4869]: I1001 15:22:47.708995 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"448e4afc-2c6f-48b3-8a80-cd7c5bfbabc2","Type":"ContainerStarted","Data":"d1e5f073998e86e4d02cd889285e144767b6d33470b3045e0fa24bdd404e3090"} Oct 01 15:22:47 crc kubenswrapper[4869]: I1001 15:22:47.709045 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"448e4afc-2c6f-48b3-8a80-cd7c5bfbabc2","Type":"ContainerStarted","Data":"ebacb9155546984576ff52735bf91c65bb188081964563a3497f09dc306f4868"} Oct 01 15:22:47 crc kubenswrapper[4869]: I1001 15:22:47.746195 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.746174076 podStartE2EDuration="2.746174076s" podCreationTimestamp="2025-10-01 15:22:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:22:47.737849166 +0000 UTC m=+1076.884692302" watchObservedRunningTime="2025-10-01 15:22:47.746174076 +0000 UTC m=+1076.893017192" Oct 01 15:22:49 crc kubenswrapper[4869]: I1001 15:22:49.023812 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Oct 01 15:22:51 crc kubenswrapper[4869]: I1001 15:22:51.071895 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 01 15:22:51 crc kubenswrapper[4869]: I1001 15:22:51.072157 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 01 15:22:53 crc kubenswrapper[4869]: I1001 15:22:53.384756 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 01 15:22:53 crc kubenswrapper[4869]: I1001 15:22:53.385195 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 01 15:22:54 crc kubenswrapper[4869]: I1001 15:22:54.024665 4869 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Oct 01 15:22:54 crc kubenswrapper[4869]: I1001 15:22:54.061324 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Oct 01 15:22:54 crc kubenswrapper[4869]: I1001 15:22:54.396515 4869 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="2eb2029a-beab-4fec-8235-eb5cdfd2ff1a" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.191:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 01 15:22:54 crc kubenswrapper[4869]: I1001 15:22:54.396542 4869 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="2eb2029a-beab-4fec-8235-eb5cdfd2ff1a" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.191:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 01 15:22:54 crc kubenswrapper[4869]: I1001 15:22:54.851981 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Oct 01 15:22:56 crc kubenswrapper[4869]: I1001 15:22:56.071567 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Oct 01 15:22:56 crc kubenswrapper[4869]: I1001 15:22:56.071616 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Oct 01 15:22:57 crc kubenswrapper[4869]: I1001 15:22:57.084549 4869 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="448e4afc-2c6f-48b3-8a80-cd7c5bfbabc2" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.193:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 01 15:22:57 crc kubenswrapper[4869]: I1001 15:22:57.084612 4869 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="448e4afc-2c6f-48b3-8a80-cd7c5bfbabc2" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.193:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 01 15:23:00 crc kubenswrapper[4869]: I1001 15:23:00.805904 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Oct 01 15:23:03 crc kubenswrapper[4869]: I1001 15:23:03.394545 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Oct 01 15:23:03 crc kubenswrapper[4869]: I1001 15:23:03.395242 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Oct 01 15:23:03 crc kubenswrapper[4869]: I1001 15:23:03.395890 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Oct 01 15:23:03 crc kubenswrapper[4869]: I1001 15:23:03.396324 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Oct 01 15:23:03 crc kubenswrapper[4869]: I1001 15:23:03.404338 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Oct 01 15:23:03 crc kubenswrapper[4869]: I1001 15:23:03.404427 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Oct 01 15:23:06 crc kubenswrapper[4869]: I1001 15:23:06.080104 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openstack/nova-metadata-0" Oct 01 15:23:06 crc kubenswrapper[4869]: I1001 15:23:06.082801 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Oct 01 15:23:06 crc kubenswrapper[4869]: I1001 15:23:06.091727 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Oct 01 15:23:06 crc kubenswrapper[4869]: I1001 15:23:06.955561 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Oct 01 15:23:13 crc kubenswrapper[4869]: I1001 15:23:13.354404 4869 patch_prober.go:28] interesting pod/machine-config-daemon-c86m8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 15:23:13 crc kubenswrapper[4869]: I1001 15:23:13.355056 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 15:23:13 crc kubenswrapper[4869]: I1001 15:23:13.355120 4869 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" Oct 01 15:23:13 crc kubenswrapper[4869]: I1001 15:23:13.356438 4869 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"0674a6010e4abaf43ad0d52524028fcd0e0d167c67609073a8bff51bfec2aabf"} pod="openshift-machine-config-operator/machine-config-daemon-c86m8" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 01 15:23:13 crc kubenswrapper[4869]: I1001 15:23:13.356576 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" containerID="cri-o://0674a6010e4abaf43ad0d52524028fcd0e0d167c67609073a8bff51bfec2aabf" gracePeriod=600 Oct 01 15:23:14 crc kubenswrapper[4869]: I1001 15:23:14.028624 4869 generic.go:334] "Generic (PLEG): container finished" podID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerID="0674a6010e4abaf43ad0d52524028fcd0e0d167c67609073a8bff51bfec2aabf" exitCode=0 Oct 01 15:23:14 crc kubenswrapper[4869]: I1001 15:23:14.028698 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" event={"ID":"a4b64f7f-0b03-4f47-965b-9fde048b735c","Type":"ContainerDied","Data":"0674a6010e4abaf43ad0d52524028fcd0e0d167c67609073a8bff51bfec2aabf"} Oct 01 15:23:14 crc kubenswrapper[4869]: I1001 15:23:14.029050 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" event={"ID":"a4b64f7f-0b03-4f47-965b-9fde048b735c","Type":"ContainerStarted","Data":"fbafe3c9358f68065a4bc8a44e7f7c6c280ff5086e57618c387c454ac514cc06"} Oct 01 15:23:14 crc kubenswrapper[4869]: I1001 15:23:14.029080 4869 scope.go:117] "RemoveContainer" containerID="4be1a24ad49d8ad1b9a1395c62f6541610f0ea2bbd6f24d661f794435b423dc8" Oct 01 15:23:14 crc kubenswrapper[4869]: I1001 15:23:14.874019 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/rabbitmq-server-0"] Oct 01 15:23:16 crc kubenswrapper[4869]: I1001 15:23:16.214593 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 01 15:23:19 crc kubenswrapper[4869]: I1001 15:23:19.502215 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="25c1e81e-fa0e-4ec6-b29c-bda2529fde66" containerName="rabbitmq" containerID="cri-o://2cdb4a3848e9ce2788889d11169e2b89ffe9fd6474ba6f492157f5a0774267f1" gracePeriod=604796 Oct 01 15:23:20 crc kubenswrapper[4869]: I1001 15:23:20.886801 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="004ab312-4718-4cf2-80df-5a2b1eccc301" containerName="rabbitmq" containerID="cri-o://8bb7c698ac78361fbdd84081d11aa849720c8828f54f87f1420ea74cb36a5faf" gracePeriod=604796 Oct 01 15:23:26 crc kubenswrapper[4869]: I1001 15:23:26.148167 4869 generic.go:334] "Generic (PLEG): container finished" podID="25c1e81e-fa0e-4ec6-b29c-bda2529fde66" containerID="2cdb4a3848e9ce2788889d11169e2b89ffe9fd6474ba6f492157f5a0774267f1" exitCode=0 Oct 01 15:23:26 crc kubenswrapper[4869]: I1001 15:23:26.148339 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"25c1e81e-fa0e-4ec6-b29c-bda2529fde66","Type":"ContainerDied","Data":"2cdb4a3848e9ce2788889d11169e2b89ffe9fd6474ba6f492157f5a0774267f1"} Oct 01 15:23:26 crc kubenswrapper[4869]: I1001 15:23:26.240071 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 01 15:23:26 crc kubenswrapper[4869]: I1001 15:23:26.384790 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-59bth\" (UniqueName: \"kubernetes.io/projected/25c1e81e-fa0e-4ec6-b29c-bda2529fde66-kube-api-access-59bth\") pod \"25c1e81e-fa0e-4ec6-b29c-bda2529fde66\" (UID: \"25c1e81e-fa0e-4ec6-b29c-bda2529fde66\") " Oct 01 15:23:26 crc kubenswrapper[4869]: I1001 15:23:26.384855 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/25c1e81e-fa0e-4ec6-b29c-bda2529fde66-rabbitmq-confd\") pod \"25c1e81e-fa0e-4ec6-b29c-bda2529fde66\" (UID: \"25c1e81e-fa0e-4ec6-b29c-bda2529fde66\") " Oct 01 15:23:26 crc kubenswrapper[4869]: I1001 15:23:26.384886 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/25c1e81e-fa0e-4ec6-b29c-bda2529fde66-plugins-conf\") pod \"25c1e81e-fa0e-4ec6-b29c-bda2529fde66\" (UID: \"25c1e81e-fa0e-4ec6-b29c-bda2529fde66\") " Oct 01 15:23:26 crc kubenswrapper[4869]: I1001 15:23:26.385003 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/25c1e81e-fa0e-4ec6-b29c-bda2529fde66-rabbitmq-tls\") pod \"25c1e81e-fa0e-4ec6-b29c-bda2529fde66\" (UID: \"25c1e81e-fa0e-4ec6-b29c-bda2529fde66\") " Oct 01 15:23:26 crc kubenswrapper[4869]: I1001 15:23:26.385036 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/25c1e81e-fa0e-4ec6-b29c-bda2529fde66-rabbitmq-erlang-cookie\") pod \"25c1e81e-fa0e-4ec6-b29c-bda2529fde66\" (UID: \"25c1e81e-fa0e-4ec6-b29c-bda2529fde66\") " Oct 01 15:23:26 crc kubenswrapper[4869]: I1001 15:23:26.385094 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume 
started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/25c1e81e-fa0e-4ec6-b29c-bda2529fde66-config-data\") pod \"25c1e81e-fa0e-4ec6-b29c-bda2529fde66\" (UID: \"25c1e81e-fa0e-4ec6-b29c-bda2529fde66\") " Oct 01 15:23:26 crc kubenswrapper[4869]: I1001 15:23:26.385118 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/25c1e81e-fa0e-4ec6-b29c-bda2529fde66-server-conf\") pod \"25c1e81e-fa0e-4ec6-b29c-bda2529fde66\" (UID: \"25c1e81e-fa0e-4ec6-b29c-bda2529fde66\") " Oct 01 15:23:26 crc kubenswrapper[4869]: I1001 15:23:26.385168 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/25c1e81e-fa0e-4ec6-b29c-bda2529fde66-rabbitmq-plugins\") pod \"25c1e81e-fa0e-4ec6-b29c-bda2529fde66\" (UID: \"25c1e81e-fa0e-4ec6-b29c-bda2529fde66\") " Oct 01 15:23:26 crc kubenswrapper[4869]: I1001 15:23:26.385191 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/25c1e81e-fa0e-4ec6-b29c-bda2529fde66-pod-info\") pod \"25c1e81e-fa0e-4ec6-b29c-bda2529fde66\" (UID: \"25c1e81e-fa0e-4ec6-b29c-bda2529fde66\") " Oct 01 15:23:26 crc kubenswrapper[4869]: I1001 15:23:26.385206 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"25c1e81e-fa0e-4ec6-b29c-bda2529fde66\" (UID: \"25c1e81e-fa0e-4ec6-b29c-bda2529fde66\") " Oct 01 15:23:26 crc kubenswrapper[4869]: I1001 15:23:26.385291 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/25c1e81e-fa0e-4ec6-b29c-bda2529fde66-erlang-cookie-secret\") pod \"25c1e81e-fa0e-4ec6-b29c-bda2529fde66\" (UID: \"25c1e81e-fa0e-4ec6-b29c-bda2529fde66\") " Oct 01 15:23:26 crc kubenswrapper[4869]: I1001 15:23:26.385703 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/25c1e81e-fa0e-4ec6-b29c-bda2529fde66-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "25c1e81e-fa0e-4ec6-b29c-bda2529fde66" (UID: "25c1e81e-fa0e-4ec6-b29c-bda2529fde66"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 15:23:26 crc kubenswrapper[4869]: I1001 15:23:26.386009 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25c1e81e-fa0e-4ec6-b29c-bda2529fde66-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "25c1e81e-fa0e-4ec6-b29c-bda2529fde66" (UID: "25c1e81e-fa0e-4ec6-b29c-bda2529fde66"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:23:26 crc kubenswrapper[4869]: I1001 15:23:26.386226 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/25c1e81e-fa0e-4ec6-b29c-bda2529fde66-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "25c1e81e-fa0e-4ec6-b29c-bda2529fde66" (UID: "25c1e81e-fa0e-4ec6-b29c-bda2529fde66"). InnerVolumeSpecName "rabbitmq-erlang-cookie". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 15:23:26 crc kubenswrapper[4869]: I1001 15:23:26.390750 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25c1e81e-fa0e-4ec6-b29c-bda2529fde66-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "25c1e81e-fa0e-4ec6-b29c-bda2529fde66" (UID: "25c1e81e-fa0e-4ec6-b29c-bda2529fde66"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:23:26 crc kubenswrapper[4869]: I1001 15:23:26.391162 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25c1e81e-fa0e-4ec6-b29c-bda2529fde66-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "25c1e81e-fa0e-4ec6-b29c-bda2529fde66" (UID: "25c1e81e-fa0e-4ec6-b29c-bda2529fde66"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:23:26 crc kubenswrapper[4869]: I1001 15:23:26.393355 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/25c1e81e-fa0e-4ec6-b29c-bda2529fde66-pod-info" (OuterVolumeSpecName: "pod-info") pod "25c1e81e-fa0e-4ec6-b29c-bda2529fde66" (UID: "25c1e81e-fa0e-4ec6-b29c-bda2529fde66"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Oct 01 15:23:26 crc kubenswrapper[4869]: I1001 15:23:26.393824 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage01-crc" (OuterVolumeSpecName: "persistence") pod "25c1e81e-fa0e-4ec6-b29c-bda2529fde66" (UID: "25c1e81e-fa0e-4ec6-b29c-bda2529fde66"). InnerVolumeSpecName "local-storage01-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 01 15:23:26 crc kubenswrapper[4869]: I1001 15:23:26.401478 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25c1e81e-fa0e-4ec6-b29c-bda2529fde66-kube-api-access-59bth" (OuterVolumeSpecName: "kube-api-access-59bth") pod "25c1e81e-fa0e-4ec6-b29c-bda2529fde66" (UID: "25c1e81e-fa0e-4ec6-b29c-bda2529fde66"). InnerVolumeSpecName "kube-api-access-59bth". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:23:26 crc kubenswrapper[4869]: I1001 15:23:26.416975 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25c1e81e-fa0e-4ec6-b29c-bda2529fde66-config-data" (OuterVolumeSpecName: "config-data") pod "25c1e81e-fa0e-4ec6-b29c-bda2529fde66" (UID: "25c1e81e-fa0e-4ec6-b29c-bda2529fde66"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:23:26 crc kubenswrapper[4869]: I1001 15:23:26.486039 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25c1e81e-fa0e-4ec6-b29c-bda2529fde66-server-conf" (OuterVolumeSpecName: "server-conf") pod "25c1e81e-fa0e-4ec6-b29c-bda2529fde66" (UID: "25c1e81e-fa0e-4ec6-b29c-bda2529fde66"). InnerVolumeSpecName "server-conf". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:23:26 crc kubenswrapper[4869]: I1001 15:23:26.486692 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/25c1e81e-fa0e-4ec6-b29c-bda2529fde66-server-conf\") pod \"25c1e81e-fa0e-4ec6-b29c-bda2529fde66\" (UID: \"25c1e81e-fa0e-4ec6-b29c-bda2529fde66\") " Oct 01 15:23:26 crc kubenswrapper[4869]: W1001 15:23:26.486807 4869 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/25c1e81e-fa0e-4ec6-b29c-bda2529fde66/volumes/kubernetes.io~configmap/server-conf Oct 01 15:23:26 crc kubenswrapper[4869]: I1001 15:23:26.486822 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25c1e81e-fa0e-4ec6-b29c-bda2529fde66-server-conf" (OuterVolumeSpecName: "server-conf") pod "25c1e81e-fa0e-4ec6-b29c-bda2529fde66" (UID: "25c1e81e-fa0e-4ec6-b29c-bda2529fde66"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:23:26 crc kubenswrapper[4869]: I1001 15:23:26.487029 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-59bth\" (UniqueName: \"kubernetes.io/projected/25c1e81e-fa0e-4ec6-b29c-bda2529fde66-kube-api-access-59bth\") on node \"crc\" DevicePath \"\"" Oct 01 15:23:26 crc kubenswrapper[4869]: I1001 15:23:26.487046 4869 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/25c1e81e-fa0e-4ec6-b29c-bda2529fde66-plugins-conf\") on node \"crc\" DevicePath \"\"" Oct 01 15:23:26 crc kubenswrapper[4869]: I1001 15:23:26.487055 4869 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/25c1e81e-fa0e-4ec6-b29c-bda2529fde66-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Oct 01 15:23:26 crc kubenswrapper[4869]: I1001 15:23:26.487066 4869 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/25c1e81e-fa0e-4ec6-b29c-bda2529fde66-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Oct 01 15:23:26 crc kubenswrapper[4869]: I1001 15:23:26.487075 4869 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/25c1e81e-fa0e-4ec6-b29c-bda2529fde66-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 15:23:26 crc kubenswrapper[4869]: I1001 15:23:26.487083 4869 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/25c1e81e-fa0e-4ec6-b29c-bda2529fde66-server-conf\") on node \"crc\" DevicePath \"\"" Oct 01 15:23:26 crc kubenswrapper[4869]: I1001 15:23:26.487092 4869 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/25c1e81e-fa0e-4ec6-b29c-bda2529fde66-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Oct 01 15:23:26 crc kubenswrapper[4869]: I1001 15:23:26.487100 4869 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/25c1e81e-fa0e-4ec6-b29c-bda2529fde66-pod-info\") on node \"crc\" DevicePath \"\"" Oct 01 15:23:26 crc kubenswrapper[4869]: I1001 15:23:26.487124 4869 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" " Oct 01 15:23:26 crc kubenswrapper[4869]: I1001 15:23:26.487133 4869 
reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/25c1e81e-fa0e-4ec6-b29c-bda2529fde66-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Oct 01 15:23:26 crc kubenswrapper[4869]: I1001 15:23:26.508191 4869 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage01-crc" (UniqueName: "kubernetes.io/local-volume/local-storage01-crc") on node "crc" Oct 01 15:23:26 crc kubenswrapper[4869]: I1001 15:23:26.544227 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25c1e81e-fa0e-4ec6-b29c-bda2529fde66-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "25c1e81e-fa0e-4ec6-b29c-bda2529fde66" (UID: "25c1e81e-fa0e-4ec6-b29c-bda2529fde66"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:23:26 crc kubenswrapper[4869]: I1001 15:23:26.588466 4869 reconciler_common.go:293] "Volume detached for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" DevicePath \"\"" Oct 01 15:23:26 crc kubenswrapper[4869]: I1001 15:23:26.588493 4869 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/25c1e81e-fa0e-4ec6-b29c-bda2529fde66-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Oct 01 15:23:27 crc kubenswrapper[4869]: I1001 15:23:27.164848 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"25c1e81e-fa0e-4ec6-b29c-bda2529fde66","Type":"ContainerDied","Data":"212451a3d82ba1930abb9091bb7bf2db7427ad3b63a4620558bd76313d9513dc"} Oct 01 15:23:27 crc kubenswrapper[4869]: I1001 15:23:27.165440 4869 scope.go:117] "RemoveContainer" containerID="2cdb4a3848e9ce2788889d11169e2b89ffe9fd6474ba6f492157f5a0774267f1" Oct 01 15:23:27 crc kubenswrapper[4869]: I1001 15:23:27.164914 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 01 15:23:27 crc kubenswrapper[4869]: I1001 15:23:27.174719 4869 generic.go:334] "Generic (PLEG): container finished" podID="004ab312-4718-4cf2-80df-5a2b1eccc301" containerID="8bb7c698ac78361fbdd84081d11aa849720c8828f54f87f1420ea74cb36a5faf" exitCode=0 Oct 01 15:23:27 crc kubenswrapper[4869]: I1001 15:23:27.174777 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"004ab312-4718-4cf2-80df-5a2b1eccc301","Type":"ContainerDied","Data":"8bb7c698ac78361fbdd84081d11aa849720c8828f54f87f1420ea74cb36a5faf"} Oct 01 15:23:27 crc kubenswrapper[4869]: I1001 15:23:27.221193 4869 scope.go:117] "RemoveContainer" containerID="676eea2624cce6e1eb68bf2c8575b8ec869fb8ee8184f7395a0ca6650eae7267" Oct 01 15:23:27 crc kubenswrapper[4869]: I1001 15:23:27.250328 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 01 15:23:27 crc kubenswrapper[4869]: I1001 15:23:27.297168 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 01 15:23:27 crc kubenswrapper[4869]: I1001 15:23:27.305315 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Oct 01 15:23:27 crc kubenswrapper[4869]: E1001 15:23:27.305767 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="25c1e81e-fa0e-4ec6-b29c-bda2529fde66" containerName="setup-container" Oct 01 15:23:27 crc kubenswrapper[4869]: I1001 15:23:27.305784 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="25c1e81e-fa0e-4ec6-b29c-bda2529fde66" containerName="setup-container" Oct 01 15:23:27 crc kubenswrapper[4869]: E1001 15:23:27.305812 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="25c1e81e-fa0e-4ec6-b29c-bda2529fde66" containerName="rabbitmq" Oct 01 15:23:27 crc kubenswrapper[4869]: I1001 15:23:27.305822 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="25c1e81e-fa0e-4ec6-b29c-bda2529fde66" containerName="rabbitmq" Oct 01 15:23:27 crc kubenswrapper[4869]: I1001 15:23:27.306058 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="25c1e81e-fa0e-4ec6-b29c-bda2529fde66" containerName="rabbitmq" Oct 01 15:23:27 crc kubenswrapper[4869]: I1001 15:23:27.307377 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 01 15:23:27 crc kubenswrapper[4869]: I1001 15:23:27.310613 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 01 15:23:27 crc kubenswrapper[4869]: I1001 15:23:27.315895 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Oct 01 15:23:27 crc kubenswrapper[4869]: I1001 15:23:27.316176 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-4lr4p" Oct 01 15:23:27 crc kubenswrapper[4869]: I1001 15:23:27.316337 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Oct 01 15:23:27 crc kubenswrapper[4869]: I1001 15:23:27.316442 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Oct 01 15:23:27 crc kubenswrapper[4869]: I1001 15:23:27.316545 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Oct 01 15:23:27 crc kubenswrapper[4869]: I1001 15:23:27.316718 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Oct 01 15:23:27 crc kubenswrapper[4869]: I1001 15:23:27.326685 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Oct 01 15:23:27 crc kubenswrapper[4869]: I1001 15:23:27.439763 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/e2d4e177-3ee6-497a-a0c6-db9305809a81-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"e2d4e177-3ee6-497a-a0c6-db9305809a81\") " pod="openstack/rabbitmq-server-0" Oct 01 15:23:27 crc kubenswrapper[4869]: I1001 15:23:27.439837 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/e2d4e177-3ee6-497a-a0c6-db9305809a81-server-conf\") pod \"rabbitmq-server-0\" (UID: \"e2d4e177-3ee6-497a-a0c6-db9305809a81\") " pod="openstack/rabbitmq-server-0" Oct 01 15:23:27 crc kubenswrapper[4869]: I1001 15:23:27.439895 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/e2d4e177-3ee6-497a-a0c6-db9305809a81-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"e2d4e177-3ee6-497a-a0c6-db9305809a81\") " pod="openstack/rabbitmq-server-0" Oct 01 15:23:27 crc kubenswrapper[4869]: I1001 15:23:27.439918 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/e2d4e177-3ee6-497a-a0c6-db9305809a81-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"e2d4e177-3ee6-497a-a0c6-db9305809a81\") " pod="openstack/rabbitmq-server-0" Oct 01 15:23:27 crc kubenswrapper[4869]: I1001 15:23:27.439939 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e2d4e177-3ee6-497a-a0c6-db9305809a81-config-data\") pod \"rabbitmq-server-0\" (UID: \"e2d4e177-3ee6-497a-a0c6-db9305809a81\") " pod="openstack/rabbitmq-server-0" Oct 01 15:23:27 crc kubenswrapper[4869]: I1001 15:23:27.439963 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: 
\"kubernetes.io/configmap/e2d4e177-3ee6-497a-a0c6-db9305809a81-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"e2d4e177-3ee6-497a-a0c6-db9305809a81\") " pod="openstack/rabbitmq-server-0" Oct 01 15:23:27 crc kubenswrapper[4869]: I1001 15:23:27.439988 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/e2d4e177-3ee6-497a-a0c6-db9305809a81-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"e2d4e177-3ee6-497a-a0c6-db9305809a81\") " pod="openstack/rabbitmq-server-0" Oct 01 15:23:27 crc kubenswrapper[4869]: I1001 15:23:27.440011 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/e2d4e177-3ee6-497a-a0c6-db9305809a81-pod-info\") pod \"rabbitmq-server-0\" (UID: \"e2d4e177-3ee6-497a-a0c6-db9305809a81\") " pod="openstack/rabbitmq-server-0" Oct 01 15:23:27 crc kubenswrapper[4869]: I1001 15:23:27.440069 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lsspc\" (UniqueName: \"kubernetes.io/projected/e2d4e177-3ee6-497a-a0c6-db9305809a81-kube-api-access-lsspc\") pod \"rabbitmq-server-0\" (UID: \"e2d4e177-3ee6-497a-a0c6-db9305809a81\") " pod="openstack/rabbitmq-server-0" Oct 01 15:23:27 crc kubenswrapper[4869]: I1001 15:23:27.440133 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: \"e2d4e177-3ee6-497a-a0c6-db9305809a81\") " pod="openstack/rabbitmq-server-0" Oct 01 15:23:27 crc kubenswrapper[4869]: I1001 15:23:27.440185 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/e2d4e177-3ee6-497a-a0c6-db9305809a81-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"e2d4e177-3ee6-497a-a0c6-db9305809a81\") " pod="openstack/rabbitmq-server-0" Oct 01 15:23:27 crc kubenswrapper[4869]: I1001 15:23:27.541726 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/e2d4e177-3ee6-497a-a0c6-db9305809a81-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"e2d4e177-3ee6-497a-a0c6-db9305809a81\") " pod="openstack/rabbitmq-server-0" Oct 01 15:23:27 crc kubenswrapper[4869]: I1001 15:23:27.541786 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/e2d4e177-3ee6-497a-a0c6-db9305809a81-server-conf\") pod \"rabbitmq-server-0\" (UID: \"e2d4e177-3ee6-497a-a0c6-db9305809a81\") " pod="openstack/rabbitmq-server-0" Oct 01 15:23:27 crc kubenswrapper[4869]: I1001 15:23:27.541829 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/e2d4e177-3ee6-497a-a0c6-db9305809a81-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"e2d4e177-3ee6-497a-a0c6-db9305809a81\") " pod="openstack/rabbitmq-server-0" Oct 01 15:23:27 crc kubenswrapper[4869]: I1001 15:23:27.541847 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/e2d4e177-3ee6-497a-a0c6-db9305809a81-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"e2d4e177-3ee6-497a-a0c6-db9305809a81\") " 
pod="openstack/rabbitmq-server-0" Oct 01 15:23:27 crc kubenswrapper[4869]: I1001 15:23:27.541865 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e2d4e177-3ee6-497a-a0c6-db9305809a81-config-data\") pod \"rabbitmq-server-0\" (UID: \"e2d4e177-3ee6-497a-a0c6-db9305809a81\") " pod="openstack/rabbitmq-server-0" Oct 01 15:23:27 crc kubenswrapper[4869]: I1001 15:23:27.541885 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/e2d4e177-3ee6-497a-a0c6-db9305809a81-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"e2d4e177-3ee6-497a-a0c6-db9305809a81\") " pod="openstack/rabbitmq-server-0" Oct 01 15:23:27 crc kubenswrapper[4869]: I1001 15:23:27.541902 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/e2d4e177-3ee6-497a-a0c6-db9305809a81-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"e2d4e177-3ee6-497a-a0c6-db9305809a81\") " pod="openstack/rabbitmq-server-0" Oct 01 15:23:27 crc kubenswrapper[4869]: I1001 15:23:27.541919 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/e2d4e177-3ee6-497a-a0c6-db9305809a81-pod-info\") pod \"rabbitmq-server-0\" (UID: \"e2d4e177-3ee6-497a-a0c6-db9305809a81\") " pod="openstack/rabbitmq-server-0" Oct 01 15:23:27 crc kubenswrapper[4869]: I1001 15:23:27.541959 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lsspc\" (UniqueName: \"kubernetes.io/projected/e2d4e177-3ee6-497a-a0c6-db9305809a81-kube-api-access-lsspc\") pod \"rabbitmq-server-0\" (UID: \"e2d4e177-3ee6-497a-a0c6-db9305809a81\") " pod="openstack/rabbitmq-server-0" Oct 01 15:23:27 crc kubenswrapper[4869]: I1001 15:23:27.541993 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: \"e2d4e177-3ee6-497a-a0c6-db9305809a81\") " pod="openstack/rabbitmq-server-0" Oct 01 15:23:27 crc kubenswrapper[4869]: I1001 15:23:27.542025 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/e2d4e177-3ee6-497a-a0c6-db9305809a81-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"e2d4e177-3ee6-497a-a0c6-db9305809a81\") " pod="openstack/rabbitmq-server-0" Oct 01 15:23:27 crc kubenswrapper[4869]: I1001 15:23:27.542784 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/e2d4e177-3ee6-497a-a0c6-db9305809a81-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"e2d4e177-3ee6-497a-a0c6-db9305809a81\") " pod="openstack/rabbitmq-server-0" Oct 01 15:23:27 crc kubenswrapper[4869]: I1001 15:23:27.542876 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/e2d4e177-3ee6-497a-a0c6-db9305809a81-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"e2d4e177-3ee6-497a-a0c6-db9305809a81\") " pod="openstack/rabbitmq-server-0" Oct 01 15:23:27 crc kubenswrapper[4869]: I1001 15:23:27.543126 4869 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod 
\"rabbitmq-server-0\" (UID: \"e2d4e177-3ee6-497a-a0c6-db9305809a81\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/rabbitmq-server-0" Oct 01 15:23:27 crc kubenswrapper[4869]: I1001 15:23:27.543493 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/e2d4e177-3ee6-497a-a0c6-db9305809a81-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"e2d4e177-3ee6-497a-a0c6-db9305809a81\") " pod="openstack/rabbitmq-server-0" Oct 01 15:23:27 crc kubenswrapper[4869]: I1001 15:23:27.543843 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e2d4e177-3ee6-497a-a0c6-db9305809a81-config-data\") pod \"rabbitmq-server-0\" (UID: \"e2d4e177-3ee6-497a-a0c6-db9305809a81\") " pod="openstack/rabbitmq-server-0" Oct 01 15:23:27 crc kubenswrapper[4869]: I1001 15:23:27.544435 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/e2d4e177-3ee6-497a-a0c6-db9305809a81-server-conf\") pod \"rabbitmq-server-0\" (UID: \"e2d4e177-3ee6-497a-a0c6-db9305809a81\") " pod="openstack/rabbitmq-server-0" Oct 01 15:23:27 crc kubenswrapper[4869]: I1001 15:23:27.548252 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/e2d4e177-3ee6-497a-a0c6-db9305809a81-pod-info\") pod \"rabbitmq-server-0\" (UID: \"e2d4e177-3ee6-497a-a0c6-db9305809a81\") " pod="openstack/rabbitmq-server-0" Oct 01 15:23:27 crc kubenswrapper[4869]: I1001 15:23:27.548284 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/e2d4e177-3ee6-497a-a0c6-db9305809a81-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"e2d4e177-3ee6-497a-a0c6-db9305809a81\") " pod="openstack/rabbitmq-server-0" Oct 01 15:23:27 crc kubenswrapper[4869]: I1001 15:23:27.557620 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/e2d4e177-3ee6-497a-a0c6-db9305809a81-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"e2d4e177-3ee6-497a-a0c6-db9305809a81\") " pod="openstack/rabbitmq-server-0" Oct 01 15:23:27 crc kubenswrapper[4869]: I1001 15:23:27.559274 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/e2d4e177-3ee6-497a-a0c6-db9305809a81-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"e2d4e177-3ee6-497a-a0c6-db9305809a81\") " pod="openstack/rabbitmq-server-0" Oct 01 15:23:27 crc kubenswrapper[4869]: I1001 15:23:27.569614 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lsspc\" (UniqueName: \"kubernetes.io/projected/e2d4e177-3ee6-497a-a0c6-db9305809a81-kube-api-access-lsspc\") pod \"rabbitmq-server-0\" (UID: \"e2d4e177-3ee6-497a-a0c6-db9305809a81\") " pod="openstack/rabbitmq-server-0" Oct 01 15:23:27 crc kubenswrapper[4869]: I1001 15:23:27.581680 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: \"e2d4e177-3ee6-497a-a0c6-db9305809a81\") " pod="openstack/rabbitmq-server-0" Oct 01 15:23:27 crc kubenswrapper[4869]: I1001 15:23:27.591188 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25c1e81e-fa0e-4ec6-b29c-bda2529fde66" 
path="/var/lib/kubelet/pods/25c1e81e-fa0e-4ec6-b29c-bda2529fde66/volumes" Oct 01 15:23:27 crc kubenswrapper[4869]: I1001 15:23:27.665693 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 01 15:23:27 crc kubenswrapper[4869]: I1001 15:23:27.673389 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Oct 01 15:23:27 crc kubenswrapper[4869]: I1001 15:23:27.768086 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/004ab312-4718-4cf2-80df-5a2b1eccc301-pod-info\") pod \"004ab312-4718-4cf2-80df-5a2b1eccc301\" (UID: \"004ab312-4718-4cf2-80df-5a2b1eccc301\") " Oct 01 15:23:27 crc kubenswrapper[4869]: I1001 15:23:27.768174 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/004ab312-4718-4cf2-80df-5a2b1eccc301-rabbitmq-erlang-cookie\") pod \"004ab312-4718-4cf2-80df-5a2b1eccc301\" (UID: \"004ab312-4718-4cf2-80df-5a2b1eccc301\") " Oct 01 15:23:27 crc kubenswrapper[4869]: I1001 15:23:27.768243 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2strd\" (UniqueName: \"kubernetes.io/projected/004ab312-4718-4cf2-80df-5a2b1eccc301-kube-api-access-2strd\") pod \"004ab312-4718-4cf2-80df-5a2b1eccc301\" (UID: \"004ab312-4718-4cf2-80df-5a2b1eccc301\") " Oct 01 15:23:27 crc kubenswrapper[4869]: I1001 15:23:27.768294 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/004ab312-4718-4cf2-80df-5a2b1eccc301-plugins-conf\") pod \"004ab312-4718-4cf2-80df-5a2b1eccc301\" (UID: \"004ab312-4718-4cf2-80df-5a2b1eccc301\") " Oct 01 15:23:27 crc kubenswrapper[4869]: I1001 15:23:27.768321 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/004ab312-4718-4cf2-80df-5a2b1eccc301-rabbitmq-tls\") pod \"004ab312-4718-4cf2-80df-5a2b1eccc301\" (UID: \"004ab312-4718-4cf2-80df-5a2b1eccc301\") " Oct 01 15:23:27 crc kubenswrapper[4869]: I1001 15:23:27.768361 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/004ab312-4718-4cf2-80df-5a2b1eccc301-server-conf\") pod \"004ab312-4718-4cf2-80df-5a2b1eccc301\" (UID: \"004ab312-4718-4cf2-80df-5a2b1eccc301\") " Oct 01 15:23:27 crc kubenswrapper[4869]: I1001 15:23:27.768473 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/004ab312-4718-4cf2-80df-5a2b1eccc301-config-data\") pod \"004ab312-4718-4cf2-80df-5a2b1eccc301\" (UID: \"004ab312-4718-4cf2-80df-5a2b1eccc301\") " Oct 01 15:23:27 crc kubenswrapper[4869]: I1001 15:23:27.768498 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"004ab312-4718-4cf2-80df-5a2b1eccc301\" (UID: \"004ab312-4718-4cf2-80df-5a2b1eccc301\") " Oct 01 15:23:27 crc kubenswrapper[4869]: I1001 15:23:27.768523 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/004ab312-4718-4cf2-80df-5a2b1eccc301-erlang-cookie-secret\") pod \"004ab312-4718-4cf2-80df-5a2b1eccc301\" (UID: 
\"004ab312-4718-4cf2-80df-5a2b1eccc301\") " Oct 01 15:23:27 crc kubenswrapper[4869]: I1001 15:23:27.768592 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/004ab312-4718-4cf2-80df-5a2b1eccc301-rabbitmq-plugins\") pod \"004ab312-4718-4cf2-80df-5a2b1eccc301\" (UID: \"004ab312-4718-4cf2-80df-5a2b1eccc301\") " Oct 01 15:23:27 crc kubenswrapper[4869]: I1001 15:23:27.768678 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/004ab312-4718-4cf2-80df-5a2b1eccc301-rabbitmq-confd\") pod \"004ab312-4718-4cf2-80df-5a2b1eccc301\" (UID: \"004ab312-4718-4cf2-80df-5a2b1eccc301\") " Oct 01 15:23:27 crc kubenswrapper[4869]: I1001 15:23:27.771513 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/004ab312-4718-4cf2-80df-5a2b1eccc301-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "004ab312-4718-4cf2-80df-5a2b1eccc301" (UID: "004ab312-4718-4cf2-80df-5a2b1eccc301"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 15:23:27 crc kubenswrapper[4869]: I1001 15:23:27.771976 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/004ab312-4718-4cf2-80df-5a2b1eccc301-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "004ab312-4718-4cf2-80df-5a2b1eccc301" (UID: "004ab312-4718-4cf2-80df-5a2b1eccc301"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 15:23:27 crc kubenswrapper[4869]: I1001 15:23:27.773872 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/004ab312-4718-4cf2-80df-5a2b1eccc301-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "004ab312-4718-4cf2-80df-5a2b1eccc301" (UID: "004ab312-4718-4cf2-80df-5a2b1eccc301"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:23:27 crc kubenswrapper[4869]: I1001 15:23:27.774687 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/004ab312-4718-4cf2-80df-5a2b1eccc301-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "004ab312-4718-4cf2-80df-5a2b1eccc301" (UID: "004ab312-4718-4cf2-80df-5a2b1eccc301"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:23:27 crc kubenswrapper[4869]: I1001 15:23:27.780805 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/004ab312-4718-4cf2-80df-5a2b1eccc301-pod-info" (OuterVolumeSpecName: "pod-info") pod "004ab312-4718-4cf2-80df-5a2b1eccc301" (UID: "004ab312-4718-4cf2-80df-5a2b1eccc301"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Oct 01 15:23:27 crc kubenswrapper[4869]: I1001 15:23:27.780816 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/004ab312-4718-4cf2-80df-5a2b1eccc301-kube-api-access-2strd" (OuterVolumeSpecName: "kube-api-access-2strd") pod "004ab312-4718-4cf2-80df-5a2b1eccc301" (UID: "004ab312-4718-4cf2-80df-5a2b1eccc301"). InnerVolumeSpecName "kube-api-access-2strd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:23:27 crc kubenswrapper[4869]: I1001 15:23:27.780874 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage02-crc" (OuterVolumeSpecName: "persistence") pod "004ab312-4718-4cf2-80df-5a2b1eccc301" (UID: "004ab312-4718-4cf2-80df-5a2b1eccc301"). InnerVolumeSpecName "local-storage02-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 01 15:23:27 crc kubenswrapper[4869]: I1001 15:23:27.780889 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/004ab312-4718-4cf2-80df-5a2b1eccc301-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "004ab312-4718-4cf2-80df-5a2b1eccc301" (UID: "004ab312-4718-4cf2-80df-5a2b1eccc301"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:23:27 crc kubenswrapper[4869]: I1001 15:23:27.800885 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/004ab312-4718-4cf2-80df-5a2b1eccc301-config-data" (OuterVolumeSpecName: "config-data") pod "004ab312-4718-4cf2-80df-5a2b1eccc301" (UID: "004ab312-4718-4cf2-80df-5a2b1eccc301"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:23:27 crc kubenswrapper[4869]: I1001 15:23:27.835676 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/004ab312-4718-4cf2-80df-5a2b1eccc301-server-conf" (OuterVolumeSpecName: "server-conf") pod "004ab312-4718-4cf2-80df-5a2b1eccc301" (UID: "004ab312-4718-4cf2-80df-5a2b1eccc301"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:23:27 crc kubenswrapper[4869]: I1001 15:23:27.870676 4869 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/004ab312-4718-4cf2-80df-5a2b1eccc301-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Oct 01 15:23:27 crc kubenswrapper[4869]: I1001 15:23:27.870701 4869 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/004ab312-4718-4cf2-80df-5a2b1eccc301-pod-info\") on node \"crc\" DevicePath \"\"" Oct 01 15:23:27 crc kubenswrapper[4869]: I1001 15:23:27.870711 4869 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/004ab312-4718-4cf2-80df-5a2b1eccc301-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Oct 01 15:23:27 crc kubenswrapper[4869]: I1001 15:23:27.870722 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2strd\" (UniqueName: \"kubernetes.io/projected/004ab312-4718-4cf2-80df-5a2b1eccc301-kube-api-access-2strd\") on node \"crc\" DevicePath \"\"" Oct 01 15:23:27 crc kubenswrapper[4869]: I1001 15:23:27.870730 4869 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/004ab312-4718-4cf2-80df-5a2b1eccc301-plugins-conf\") on node \"crc\" DevicePath \"\"" Oct 01 15:23:27 crc kubenswrapper[4869]: I1001 15:23:27.870740 4869 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/004ab312-4718-4cf2-80df-5a2b1eccc301-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Oct 01 15:23:27 crc kubenswrapper[4869]: I1001 15:23:27.870748 4869 reconciler_common.go:293] "Volume detached for volume 
\"server-conf\" (UniqueName: \"kubernetes.io/configmap/004ab312-4718-4cf2-80df-5a2b1eccc301-server-conf\") on node \"crc\" DevicePath \"\"" Oct 01 15:23:27 crc kubenswrapper[4869]: I1001 15:23:27.870756 4869 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/004ab312-4718-4cf2-80df-5a2b1eccc301-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 15:23:27 crc kubenswrapper[4869]: I1001 15:23:27.870782 4869 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" " Oct 01 15:23:27 crc kubenswrapper[4869]: I1001 15:23:27.870791 4869 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/004ab312-4718-4cf2-80df-5a2b1eccc301-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Oct 01 15:23:27 crc kubenswrapper[4869]: I1001 15:23:27.878780 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/004ab312-4718-4cf2-80df-5a2b1eccc301-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "004ab312-4718-4cf2-80df-5a2b1eccc301" (UID: "004ab312-4718-4cf2-80df-5a2b1eccc301"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:23:27 crc kubenswrapper[4869]: I1001 15:23:27.907473 4869 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage02-crc" (UniqueName: "kubernetes.io/local-volume/local-storage02-crc") on node "crc" Oct 01 15:23:27 crc kubenswrapper[4869]: I1001 15:23:27.971864 4869 reconciler_common.go:293] "Volume detached for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" DevicePath \"\"" Oct 01 15:23:27 crc kubenswrapper[4869]: I1001 15:23:27.971896 4869 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/004ab312-4718-4cf2-80df-5a2b1eccc301-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Oct 01 15:23:28 crc kubenswrapper[4869]: I1001 15:23:28.158697 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 01 15:23:28 crc kubenswrapper[4869]: I1001 15:23:28.189277 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"e2d4e177-3ee6-497a-a0c6-db9305809a81","Type":"ContainerStarted","Data":"0e62e8d4cb465f863a7af4afeab4e6e1de458ec61898acae6348de9b6c1ef75b"} Oct 01 15:23:28 crc kubenswrapper[4869]: I1001 15:23:28.191802 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"004ab312-4718-4cf2-80df-5a2b1eccc301","Type":"ContainerDied","Data":"2ca351ebd7a6d92609dd02a379587ee6f1bba90d0bbb20f656a7cbfc49060f8d"} Oct 01 15:23:28 crc kubenswrapper[4869]: I1001 15:23:28.191850 4869 scope.go:117] "RemoveContainer" containerID="8bb7c698ac78361fbdd84081d11aa849720c8828f54f87f1420ea74cb36a5faf" Oct 01 15:23:28 crc kubenswrapper[4869]: I1001 15:23:28.191908 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Oct 01 15:23:28 crc kubenswrapper[4869]: I1001 15:23:28.291663 4869 scope.go:117] "RemoveContainer" containerID="d20e4a80d05da9996c50b83728ad8a401a62da2d044eb3737994a92551baa863" Oct 01 15:23:28 crc kubenswrapper[4869]: I1001 15:23:28.317695 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 01 15:23:28 crc kubenswrapper[4869]: I1001 15:23:28.333796 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 01 15:23:28 crc kubenswrapper[4869]: I1001 15:23:28.355431 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 01 15:23:28 crc kubenswrapper[4869]: E1001 15:23:28.355916 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="004ab312-4718-4cf2-80df-5a2b1eccc301" containerName="rabbitmq" Oct 01 15:23:28 crc kubenswrapper[4869]: I1001 15:23:28.355938 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="004ab312-4718-4cf2-80df-5a2b1eccc301" containerName="rabbitmq" Oct 01 15:23:28 crc kubenswrapper[4869]: E1001 15:23:28.355973 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="004ab312-4718-4cf2-80df-5a2b1eccc301" containerName="setup-container" Oct 01 15:23:28 crc kubenswrapper[4869]: I1001 15:23:28.355981 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="004ab312-4718-4cf2-80df-5a2b1eccc301" containerName="setup-container" Oct 01 15:23:28 crc kubenswrapper[4869]: I1001 15:23:28.356232 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="004ab312-4718-4cf2-80df-5a2b1eccc301" containerName="rabbitmq" Oct 01 15:23:28 crc kubenswrapper[4869]: I1001 15:23:28.358091 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Oct 01 15:23:28 crc kubenswrapper[4869]: I1001 15:23:28.361761 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Oct 01 15:23:28 crc kubenswrapper[4869]: I1001 15:23:28.361976 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Oct 01 15:23:28 crc kubenswrapper[4869]: I1001 15:23:28.362133 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Oct 01 15:23:28 crc kubenswrapper[4869]: I1001 15:23:28.362435 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Oct 01 15:23:28 crc kubenswrapper[4869]: I1001 15:23:28.362603 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Oct 01 15:23:28 crc kubenswrapper[4869]: I1001 15:23:28.362879 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-djs62" Oct 01 15:23:28 crc kubenswrapper[4869]: I1001 15:23:28.363864 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Oct 01 15:23:28 crc kubenswrapper[4869]: I1001 15:23:28.374910 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 01 15:23:28 crc kubenswrapper[4869]: I1001 15:23:28.482155 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/70946011-083d-41f8-acf9-ab0c4711b48b-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"70946011-083d-41f8-acf9-ab0c4711b48b\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 15:23:28 crc kubenswrapper[4869]: I1001 15:23:28.482200 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/70946011-083d-41f8-acf9-ab0c4711b48b-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"70946011-083d-41f8-acf9-ab0c4711b48b\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 15:23:28 crc kubenswrapper[4869]: I1001 15:23:28.482243 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"70946011-083d-41f8-acf9-ab0c4711b48b\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 15:23:28 crc kubenswrapper[4869]: I1001 15:23:28.482277 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/70946011-083d-41f8-acf9-ab0c4711b48b-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"70946011-083d-41f8-acf9-ab0c4711b48b\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 15:23:28 crc kubenswrapper[4869]: I1001 15:23:28.482352 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/70946011-083d-41f8-acf9-ab0c4711b48b-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"70946011-083d-41f8-acf9-ab0c4711b48b\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 15:23:28 crc kubenswrapper[4869]: I1001 15:23:28.482379 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/70946011-083d-41f8-acf9-ab0c4711b48b-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"70946011-083d-41f8-acf9-ab0c4711b48b\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 15:23:28 crc kubenswrapper[4869]: I1001 15:23:28.482394 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/70946011-083d-41f8-acf9-ab0c4711b48b-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"70946011-083d-41f8-acf9-ab0c4711b48b\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 15:23:28 crc kubenswrapper[4869]: I1001 15:23:28.482412 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/70946011-083d-41f8-acf9-ab0c4711b48b-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"70946011-083d-41f8-acf9-ab0c4711b48b\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 15:23:28 crc kubenswrapper[4869]: I1001 15:23:28.482429 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zd4c7\" (UniqueName: \"kubernetes.io/projected/70946011-083d-41f8-acf9-ab0c4711b48b-kube-api-access-zd4c7\") pod \"rabbitmq-cell1-server-0\" (UID: \"70946011-083d-41f8-acf9-ab0c4711b48b\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 15:23:28 crc kubenswrapper[4869]: I1001 15:23:28.482460 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/70946011-083d-41f8-acf9-ab0c4711b48b-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"70946011-083d-41f8-acf9-ab0c4711b48b\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 15:23:28 crc kubenswrapper[4869]: I1001 15:23:28.482500 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/70946011-083d-41f8-acf9-ab0c4711b48b-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"70946011-083d-41f8-acf9-ab0c4711b48b\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 15:23:28 crc kubenswrapper[4869]: I1001 15:23:28.584194 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/70946011-083d-41f8-acf9-ab0c4711b48b-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"70946011-083d-41f8-acf9-ab0c4711b48b\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 15:23:28 crc kubenswrapper[4869]: I1001 15:23:28.584641 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/70946011-083d-41f8-acf9-ab0c4711b48b-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"70946011-083d-41f8-acf9-ab0c4711b48b\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 15:23:28 crc kubenswrapper[4869]: I1001 15:23:28.584670 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/70946011-083d-41f8-acf9-ab0c4711b48b-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"70946011-083d-41f8-acf9-ab0c4711b48b\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 15:23:28 crc kubenswrapper[4869]: I1001 15:23:28.584724 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" 
(UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"70946011-083d-41f8-acf9-ab0c4711b48b\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 15:23:28 crc kubenswrapper[4869]: I1001 15:23:28.584758 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/70946011-083d-41f8-acf9-ab0c4711b48b-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"70946011-083d-41f8-acf9-ab0c4711b48b\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 15:23:28 crc kubenswrapper[4869]: I1001 15:23:28.584838 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/70946011-083d-41f8-acf9-ab0c4711b48b-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"70946011-083d-41f8-acf9-ab0c4711b48b\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 15:23:28 crc kubenswrapper[4869]: I1001 15:23:28.584875 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/70946011-083d-41f8-acf9-ab0c4711b48b-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"70946011-083d-41f8-acf9-ab0c4711b48b\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 15:23:28 crc kubenswrapper[4869]: I1001 15:23:28.584906 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/70946011-083d-41f8-acf9-ab0c4711b48b-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"70946011-083d-41f8-acf9-ab0c4711b48b\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 15:23:28 crc kubenswrapper[4869]: I1001 15:23:28.584945 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/70946011-083d-41f8-acf9-ab0c4711b48b-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"70946011-083d-41f8-acf9-ab0c4711b48b\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 15:23:28 crc kubenswrapper[4869]: I1001 15:23:28.584962 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zd4c7\" (UniqueName: \"kubernetes.io/projected/70946011-083d-41f8-acf9-ab0c4711b48b-kube-api-access-zd4c7\") pod \"rabbitmq-cell1-server-0\" (UID: \"70946011-083d-41f8-acf9-ab0c4711b48b\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 15:23:28 crc kubenswrapper[4869]: I1001 15:23:28.585001 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/70946011-083d-41f8-acf9-ab0c4711b48b-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"70946011-083d-41f8-acf9-ab0c4711b48b\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 15:23:28 crc kubenswrapper[4869]: I1001 15:23:28.586104 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/70946011-083d-41f8-acf9-ab0c4711b48b-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"70946011-083d-41f8-acf9-ab0c4711b48b\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 15:23:28 crc kubenswrapper[4869]: I1001 15:23:28.586249 4869 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"rabbitmq-cell1-server-0\" (UID: 
\"70946011-083d-41f8-acf9-ab0c4711b48b\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/rabbitmq-cell1-server-0" Oct 01 15:23:28 crc kubenswrapper[4869]: I1001 15:23:28.586469 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/70946011-083d-41f8-acf9-ab0c4711b48b-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"70946011-083d-41f8-acf9-ab0c4711b48b\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 15:23:28 crc kubenswrapper[4869]: I1001 15:23:28.586508 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/70946011-083d-41f8-acf9-ab0c4711b48b-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"70946011-083d-41f8-acf9-ab0c4711b48b\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 15:23:28 crc kubenswrapper[4869]: I1001 15:23:28.586904 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/70946011-083d-41f8-acf9-ab0c4711b48b-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"70946011-083d-41f8-acf9-ab0c4711b48b\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 15:23:28 crc kubenswrapper[4869]: I1001 15:23:28.588333 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/70946011-083d-41f8-acf9-ab0c4711b48b-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"70946011-083d-41f8-acf9-ab0c4711b48b\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 15:23:28 crc kubenswrapper[4869]: I1001 15:23:28.590997 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/70946011-083d-41f8-acf9-ab0c4711b48b-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"70946011-083d-41f8-acf9-ab0c4711b48b\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 15:23:28 crc kubenswrapper[4869]: I1001 15:23:28.591728 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/70946011-083d-41f8-acf9-ab0c4711b48b-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"70946011-083d-41f8-acf9-ab0c4711b48b\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 15:23:28 crc kubenswrapper[4869]: I1001 15:23:28.592990 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/70946011-083d-41f8-acf9-ab0c4711b48b-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"70946011-083d-41f8-acf9-ab0c4711b48b\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 15:23:28 crc kubenswrapper[4869]: I1001 15:23:28.595091 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/70946011-083d-41f8-acf9-ab0c4711b48b-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"70946011-083d-41f8-acf9-ab0c4711b48b\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 15:23:28 crc kubenswrapper[4869]: I1001 15:23:28.615293 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zd4c7\" (UniqueName: \"kubernetes.io/projected/70946011-083d-41f8-acf9-ab0c4711b48b-kube-api-access-zd4c7\") pod \"rabbitmq-cell1-server-0\" (UID: \"70946011-083d-41f8-acf9-ab0c4711b48b\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 15:23:28 crc kubenswrapper[4869]: I1001 15:23:28.623457 4869 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"70946011-083d-41f8-acf9-ab0c4711b48b\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 15:23:28 crc kubenswrapper[4869]: I1001 15:23:28.679784 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Oct 01 15:23:29 crc kubenswrapper[4869]: I1001 15:23:29.000631 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 01 15:23:29 crc kubenswrapper[4869]: W1001 15:23:29.007748 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod70946011_083d_41f8_acf9_ab0c4711b48b.slice/crio-bc96a31bfd69769799f28246ad4c0edf58de4f966f6d8be351eacd2ccd9e5cd4 WatchSource:0}: Error finding container bc96a31bfd69769799f28246ad4c0edf58de4f966f6d8be351eacd2ccd9e5cd4: Status 404 returned error can't find the container with id bc96a31bfd69769799f28246ad4c0edf58de4f966f6d8be351eacd2ccd9e5cd4 Oct 01 15:23:29 crc kubenswrapper[4869]: I1001 15:23:29.205106 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"70946011-083d-41f8-acf9-ab0c4711b48b","Type":"ContainerStarted","Data":"bc96a31bfd69769799f28246ad4c0edf58de4f966f6d8be351eacd2ccd9e5cd4"} Oct 01 15:23:29 crc kubenswrapper[4869]: I1001 15:23:29.595985 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="004ab312-4718-4cf2-80df-5a2b1eccc301" path="/var/lib/kubelet/pods/004ab312-4718-4cf2-80df-5a2b1eccc301/volumes" Oct 01 15:23:30 crc kubenswrapper[4869]: I1001 15:23:30.222364 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"e2d4e177-3ee6-497a-a0c6-db9305809a81","Type":"ContainerStarted","Data":"fc212ca76b639e00ea7368fe380ebbc7c8ef4688715551d05f6bdea1604eefbc"} Oct 01 15:23:31 crc kubenswrapper[4869]: I1001 15:23:31.180490 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5bfdb7854c-6bc4b"] Oct 01 15:23:31 crc kubenswrapper[4869]: I1001 15:23:31.183509 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5bfdb7854c-6bc4b" Oct 01 15:23:31 crc kubenswrapper[4869]: I1001 15:23:31.190851 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-edpm-ipam" Oct 01 15:23:31 crc kubenswrapper[4869]: I1001 15:23:31.199788 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5bfdb7854c-6bc4b"] Oct 01 15:23:31 crc kubenswrapper[4869]: I1001 15:23:31.268340 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/a2dce293-f4ab-4f01-a61d-ae6f4610ca09-openstack-edpm-ipam\") pod \"dnsmasq-dns-5bfdb7854c-6bc4b\" (UID: \"a2dce293-f4ab-4f01-a61d-ae6f4610ca09\") " pod="openstack/dnsmasq-dns-5bfdb7854c-6bc4b" Oct 01 15:23:31 crc kubenswrapper[4869]: I1001 15:23:31.268398 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a2dce293-f4ab-4f01-a61d-ae6f4610ca09-config\") pod \"dnsmasq-dns-5bfdb7854c-6bc4b\" (UID: \"a2dce293-f4ab-4f01-a61d-ae6f4610ca09\") " pod="openstack/dnsmasq-dns-5bfdb7854c-6bc4b" Oct 01 15:23:31 crc kubenswrapper[4869]: I1001 15:23:31.268498 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7v4hn\" (UniqueName: \"kubernetes.io/projected/a2dce293-f4ab-4f01-a61d-ae6f4610ca09-kube-api-access-7v4hn\") pod \"dnsmasq-dns-5bfdb7854c-6bc4b\" (UID: \"a2dce293-f4ab-4f01-a61d-ae6f4610ca09\") " pod="openstack/dnsmasq-dns-5bfdb7854c-6bc4b" Oct 01 15:23:31 crc kubenswrapper[4869]: I1001 15:23:31.268615 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a2dce293-f4ab-4f01-a61d-ae6f4610ca09-ovsdbserver-nb\") pod \"dnsmasq-dns-5bfdb7854c-6bc4b\" (UID: \"a2dce293-f4ab-4f01-a61d-ae6f4610ca09\") " pod="openstack/dnsmasq-dns-5bfdb7854c-6bc4b" Oct 01 15:23:31 crc kubenswrapper[4869]: I1001 15:23:31.268643 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a2dce293-f4ab-4f01-a61d-ae6f4610ca09-dns-svc\") pod \"dnsmasq-dns-5bfdb7854c-6bc4b\" (UID: \"a2dce293-f4ab-4f01-a61d-ae6f4610ca09\") " pod="openstack/dnsmasq-dns-5bfdb7854c-6bc4b" Oct 01 15:23:31 crc kubenswrapper[4869]: I1001 15:23:31.268662 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a2dce293-f4ab-4f01-a61d-ae6f4610ca09-ovsdbserver-sb\") pod \"dnsmasq-dns-5bfdb7854c-6bc4b\" (UID: \"a2dce293-f4ab-4f01-a61d-ae6f4610ca09\") " pod="openstack/dnsmasq-dns-5bfdb7854c-6bc4b" Oct 01 15:23:31 crc kubenswrapper[4869]: I1001 15:23:31.370199 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a2dce293-f4ab-4f01-a61d-ae6f4610ca09-dns-svc\") pod \"dnsmasq-dns-5bfdb7854c-6bc4b\" (UID: \"a2dce293-f4ab-4f01-a61d-ae6f4610ca09\") " pod="openstack/dnsmasq-dns-5bfdb7854c-6bc4b" Oct 01 15:23:31 crc kubenswrapper[4869]: I1001 15:23:31.370571 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a2dce293-f4ab-4f01-a61d-ae6f4610ca09-ovsdbserver-sb\") pod \"dnsmasq-dns-5bfdb7854c-6bc4b\" (UID: 
\"a2dce293-f4ab-4f01-a61d-ae6f4610ca09\") " pod="openstack/dnsmasq-dns-5bfdb7854c-6bc4b" Oct 01 15:23:31 crc kubenswrapper[4869]: I1001 15:23:31.370713 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/a2dce293-f4ab-4f01-a61d-ae6f4610ca09-openstack-edpm-ipam\") pod \"dnsmasq-dns-5bfdb7854c-6bc4b\" (UID: \"a2dce293-f4ab-4f01-a61d-ae6f4610ca09\") " pod="openstack/dnsmasq-dns-5bfdb7854c-6bc4b" Oct 01 15:23:31 crc kubenswrapper[4869]: I1001 15:23:31.370747 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a2dce293-f4ab-4f01-a61d-ae6f4610ca09-config\") pod \"dnsmasq-dns-5bfdb7854c-6bc4b\" (UID: \"a2dce293-f4ab-4f01-a61d-ae6f4610ca09\") " pod="openstack/dnsmasq-dns-5bfdb7854c-6bc4b" Oct 01 15:23:31 crc kubenswrapper[4869]: I1001 15:23:31.371344 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a2dce293-f4ab-4f01-a61d-ae6f4610ca09-dns-svc\") pod \"dnsmasq-dns-5bfdb7854c-6bc4b\" (UID: \"a2dce293-f4ab-4f01-a61d-ae6f4610ca09\") " pod="openstack/dnsmasq-dns-5bfdb7854c-6bc4b" Oct 01 15:23:31 crc kubenswrapper[4869]: I1001 15:23:31.371370 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a2dce293-f4ab-4f01-a61d-ae6f4610ca09-ovsdbserver-sb\") pod \"dnsmasq-dns-5bfdb7854c-6bc4b\" (UID: \"a2dce293-f4ab-4f01-a61d-ae6f4610ca09\") " pod="openstack/dnsmasq-dns-5bfdb7854c-6bc4b" Oct 01 15:23:31 crc kubenswrapper[4869]: I1001 15:23:31.371611 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/a2dce293-f4ab-4f01-a61d-ae6f4610ca09-openstack-edpm-ipam\") pod \"dnsmasq-dns-5bfdb7854c-6bc4b\" (UID: \"a2dce293-f4ab-4f01-a61d-ae6f4610ca09\") " pod="openstack/dnsmasq-dns-5bfdb7854c-6bc4b" Oct 01 15:23:31 crc kubenswrapper[4869]: I1001 15:23:31.371693 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a2dce293-f4ab-4f01-a61d-ae6f4610ca09-config\") pod \"dnsmasq-dns-5bfdb7854c-6bc4b\" (UID: \"a2dce293-f4ab-4f01-a61d-ae6f4610ca09\") " pod="openstack/dnsmasq-dns-5bfdb7854c-6bc4b" Oct 01 15:23:31 crc kubenswrapper[4869]: I1001 15:23:31.371740 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7v4hn\" (UniqueName: \"kubernetes.io/projected/a2dce293-f4ab-4f01-a61d-ae6f4610ca09-kube-api-access-7v4hn\") pod \"dnsmasq-dns-5bfdb7854c-6bc4b\" (UID: \"a2dce293-f4ab-4f01-a61d-ae6f4610ca09\") " pod="openstack/dnsmasq-dns-5bfdb7854c-6bc4b" Oct 01 15:23:31 crc kubenswrapper[4869]: I1001 15:23:31.372326 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a2dce293-f4ab-4f01-a61d-ae6f4610ca09-ovsdbserver-nb\") pod \"dnsmasq-dns-5bfdb7854c-6bc4b\" (UID: \"a2dce293-f4ab-4f01-a61d-ae6f4610ca09\") " pod="openstack/dnsmasq-dns-5bfdb7854c-6bc4b" Oct 01 15:23:31 crc kubenswrapper[4869]: I1001 15:23:31.373339 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a2dce293-f4ab-4f01-a61d-ae6f4610ca09-ovsdbserver-nb\") pod \"dnsmasq-dns-5bfdb7854c-6bc4b\" (UID: \"a2dce293-f4ab-4f01-a61d-ae6f4610ca09\") " pod="openstack/dnsmasq-dns-5bfdb7854c-6bc4b" Oct 01 15:23:31 
crc kubenswrapper[4869]: I1001 15:23:31.390720 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7v4hn\" (UniqueName: \"kubernetes.io/projected/a2dce293-f4ab-4f01-a61d-ae6f4610ca09-kube-api-access-7v4hn\") pod \"dnsmasq-dns-5bfdb7854c-6bc4b\" (UID: \"a2dce293-f4ab-4f01-a61d-ae6f4610ca09\") " pod="openstack/dnsmasq-dns-5bfdb7854c-6bc4b" Oct 01 15:23:31 crc kubenswrapper[4869]: I1001 15:23:31.513967 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5bfdb7854c-6bc4b" Oct 01 15:23:31 crc kubenswrapper[4869]: I1001 15:23:31.970912 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5bfdb7854c-6bc4b"] Oct 01 15:23:32 crc kubenswrapper[4869]: I1001 15:23:32.245170 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"70946011-083d-41f8-acf9-ab0c4711b48b","Type":"ContainerStarted","Data":"04578113632a59052138892123b9e2414104a170f9f57dd921dd4ccdf26512dc"} Oct 01 15:23:32 crc kubenswrapper[4869]: I1001 15:23:32.246742 4869 generic.go:334] "Generic (PLEG): container finished" podID="a2dce293-f4ab-4f01-a61d-ae6f4610ca09" containerID="f13e01ac03210d3686a8759d25bbab671d7a189763e709c5d3078b02bacab6cf" exitCode=0 Oct 01 15:23:32 crc kubenswrapper[4869]: I1001 15:23:32.246863 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bfdb7854c-6bc4b" event={"ID":"a2dce293-f4ab-4f01-a61d-ae6f4610ca09","Type":"ContainerDied","Data":"f13e01ac03210d3686a8759d25bbab671d7a189763e709c5d3078b02bacab6cf"} Oct 01 15:23:32 crc kubenswrapper[4869]: I1001 15:23:32.247054 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bfdb7854c-6bc4b" event={"ID":"a2dce293-f4ab-4f01-a61d-ae6f4610ca09","Type":"ContainerStarted","Data":"d3825971c299967bcbde962411a837b648496a08ca25febdc70421727e9c6c7e"} Oct 01 15:23:33 crc kubenswrapper[4869]: I1001 15:23:33.259306 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bfdb7854c-6bc4b" event={"ID":"a2dce293-f4ab-4f01-a61d-ae6f4610ca09","Type":"ContainerStarted","Data":"527837c093e600f7783a814bd8ee2e5faac1b789298c86eea9dd0cd2379da62b"} Oct 01 15:23:33 crc kubenswrapper[4869]: I1001 15:23:33.259600 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5bfdb7854c-6bc4b" Oct 01 15:23:33 crc kubenswrapper[4869]: I1001 15:23:33.290754 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5bfdb7854c-6bc4b" podStartSLOduration=2.290733683 podStartE2EDuration="2.290733683s" podCreationTimestamp="2025-10-01 15:23:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:23:33.282011353 +0000 UTC m=+1122.428854459" watchObservedRunningTime="2025-10-01 15:23:33.290733683 +0000 UTC m=+1122.437576819" Oct 01 15:23:41 crc kubenswrapper[4869]: I1001 15:23:41.515434 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5bfdb7854c-6bc4b" Oct 01 15:23:41 crc kubenswrapper[4869]: I1001 15:23:41.645971 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6fcdb98747-4stnn"] Oct 01 15:23:41 crc kubenswrapper[4869]: I1001 15:23:41.646242 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6fcdb98747-4stnn" 
podUID="277a7a86-40f9-47c0-9e4c-ec4193086c16" containerName="dnsmasq-dns" containerID="cri-o://1a3f91626d055dedf37348ce45e16bf1c2049c8fe2fac5e3992abc3ca716c955" gracePeriod=10 Oct 01 15:23:41 crc kubenswrapper[4869]: I1001 15:23:41.740064 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7bf874d8bf-rkktm"] Oct 01 15:23:41 crc kubenswrapper[4869]: I1001 15:23:41.742030 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7bf874d8bf-rkktm" Oct 01 15:23:41 crc kubenswrapper[4869]: I1001 15:23:41.771387 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7bf874d8bf-rkktm"] Oct 01 15:23:41 crc kubenswrapper[4869]: I1001 15:23:41.798990 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2cc0314c-8edd-431b-a31a-3a2355225d9a-config\") pod \"dnsmasq-dns-7bf874d8bf-rkktm\" (UID: \"2cc0314c-8edd-431b-a31a-3a2355225d9a\") " pod="openstack/dnsmasq-dns-7bf874d8bf-rkktm" Oct 01 15:23:41 crc kubenswrapper[4869]: I1001 15:23:41.799028 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2cc0314c-8edd-431b-a31a-3a2355225d9a-dns-svc\") pod \"dnsmasq-dns-7bf874d8bf-rkktm\" (UID: \"2cc0314c-8edd-431b-a31a-3a2355225d9a\") " pod="openstack/dnsmasq-dns-7bf874d8bf-rkktm" Oct 01 15:23:41 crc kubenswrapper[4869]: I1001 15:23:41.799136 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2cc0314c-8edd-431b-a31a-3a2355225d9a-ovsdbserver-sb\") pod \"dnsmasq-dns-7bf874d8bf-rkktm\" (UID: \"2cc0314c-8edd-431b-a31a-3a2355225d9a\") " pod="openstack/dnsmasq-dns-7bf874d8bf-rkktm" Oct 01 15:23:41 crc kubenswrapper[4869]: I1001 15:23:41.799175 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2cc0314c-8edd-431b-a31a-3a2355225d9a-ovsdbserver-nb\") pod \"dnsmasq-dns-7bf874d8bf-rkktm\" (UID: \"2cc0314c-8edd-431b-a31a-3a2355225d9a\") " pod="openstack/dnsmasq-dns-7bf874d8bf-rkktm" Oct 01 15:23:41 crc kubenswrapper[4869]: I1001 15:23:41.799199 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j557c\" (UniqueName: \"kubernetes.io/projected/2cc0314c-8edd-431b-a31a-3a2355225d9a-kube-api-access-j557c\") pod \"dnsmasq-dns-7bf874d8bf-rkktm\" (UID: \"2cc0314c-8edd-431b-a31a-3a2355225d9a\") " pod="openstack/dnsmasq-dns-7bf874d8bf-rkktm" Oct 01 15:23:41 crc kubenswrapper[4869]: I1001 15:23:41.799230 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/2cc0314c-8edd-431b-a31a-3a2355225d9a-openstack-edpm-ipam\") pod \"dnsmasq-dns-7bf874d8bf-rkktm\" (UID: \"2cc0314c-8edd-431b-a31a-3a2355225d9a\") " pod="openstack/dnsmasq-dns-7bf874d8bf-rkktm" Oct 01 15:23:41 crc kubenswrapper[4869]: I1001 15:23:41.901229 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2cc0314c-8edd-431b-a31a-3a2355225d9a-ovsdbserver-sb\") pod \"dnsmasq-dns-7bf874d8bf-rkktm\" (UID: \"2cc0314c-8edd-431b-a31a-3a2355225d9a\") " pod="openstack/dnsmasq-dns-7bf874d8bf-rkktm" Oct 01 15:23:41 crc 
kubenswrapper[4869]: I1001 15:23:41.901338 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2cc0314c-8edd-431b-a31a-3a2355225d9a-ovsdbserver-nb\") pod \"dnsmasq-dns-7bf874d8bf-rkktm\" (UID: \"2cc0314c-8edd-431b-a31a-3a2355225d9a\") " pod="openstack/dnsmasq-dns-7bf874d8bf-rkktm" Oct 01 15:23:41 crc kubenswrapper[4869]: I1001 15:23:41.901361 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j557c\" (UniqueName: \"kubernetes.io/projected/2cc0314c-8edd-431b-a31a-3a2355225d9a-kube-api-access-j557c\") pod \"dnsmasq-dns-7bf874d8bf-rkktm\" (UID: \"2cc0314c-8edd-431b-a31a-3a2355225d9a\") " pod="openstack/dnsmasq-dns-7bf874d8bf-rkktm" Oct 01 15:23:41 crc kubenswrapper[4869]: I1001 15:23:41.901392 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/2cc0314c-8edd-431b-a31a-3a2355225d9a-openstack-edpm-ipam\") pod \"dnsmasq-dns-7bf874d8bf-rkktm\" (UID: \"2cc0314c-8edd-431b-a31a-3a2355225d9a\") " pod="openstack/dnsmasq-dns-7bf874d8bf-rkktm" Oct 01 15:23:41 crc kubenswrapper[4869]: I1001 15:23:41.901421 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2cc0314c-8edd-431b-a31a-3a2355225d9a-config\") pod \"dnsmasq-dns-7bf874d8bf-rkktm\" (UID: \"2cc0314c-8edd-431b-a31a-3a2355225d9a\") " pod="openstack/dnsmasq-dns-7bf874d8bf-rkktm" Oct 01 15:23:41 crc kubenswrapper[4869]: I1001 15:23:41.901440 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2cc0314c-8edd-431b-a31a-3a2355225d9a-dns-svc\") pod \"dnsmasq-dns-7bf874d8bf-rkktm\" (UID: \"2cc0314c-8edd-431b-a31a-3a2355225d9a\") " pod="openstack/dnsmasq-dns-7bf874d8bf-rkktm" Oct 01 15:23:41 crc kubenswrapper[4869]: I1001 15:23:41.903210 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2cc0314c-8edd-431b-a31a-3a2355225d9a-ovsdbserver-nb\") pod \"dnsmasq-dns-7bf874d8bf-rkktm\" (UID: \"2cc0314c-8edd-431b-a31a-3a2355225d9a\") " pod="openstack/dnsmasq-dns-7bf874d8bf-rkktm" Oct 01 15:23:41 crc kubenswrapper[4869]: I1001 15:23:41.903471 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2cc0314c-8edd-431b-a31a-3a2355225d9a-ovsdbserver-sb\") pod \"dnsmasq-dns-7bf874d8bf-rkktm\" (UID: \"2cc0314c-8edd-431b-a31a-3a2355225d9a\") " pod="openstack/dnsmasq-dns-7bf874d8bf-rkktm" Oct 01 15:23:41 crc kubenswrapper[4869]: I1001 15:23:41.904185 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/2cc0314c-8edd-431b-a31a-3a2355225d9a-openstack-edpm-ipam\") pod \"dnsmasq-dns-7bf874d8bf-rkktm\" (UID: \"2cc0314c-8edd-431b-a31a-3a2355225d9a\") " pod="openstack/dnsmasq-dns-7bf874d8bf-rkktm" Oct 01 15:23:41 crc kubenswrapper[4869]: I1001 15:23:41.905510 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2cc0314c-8edd-431b-a31a-3a2355225d9a-config\") pod \"dnsmasq-dns-7bf874d8bf-rkktm\" (UID: \"2cc0314c-8edd-431b-a31a-3a2355225d9a\") " pod="openstack/dnsmasq-dns-7bf874d8bf-rkktm" Oct 01 15:23:41 crc kubenswrapper[4869]: I1001 15:23:41.906901 4869 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2cc0314c-8edd-431b-a31a-3a2355225d9a-dns-svc\") pod \"dnsmasq-dns-7bf874d8bf-rkktm\" (UID: \"2cc0314c-8edd-431b-a31a-3a2355225d9a\") " pod="openstack/dnsmasq-dns-7bf874d8bf-rkktm" Oct 01 15:23:41 crc kubenswrapper[4869]: I1001 15:23:41.934113 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j557c\" (UniqueName: \"kubernetes.io/projected/2cc0314c-8edd-431b-a31a-3a2355225d9a-kube-api-access-j557c\") pod \"dnsmasq-dns-7bf874d8bf-rkktm\" (UID: \"2cc0314c-8edd-431b-a31a-3a2355225d9a\") " pod="openstack/dnsmasq-dns-7bf874d8bf-rkktm" Oct 01 15:23:42 crc kubenswrapper[4869]: I1001 15:23:42.112202 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6fcdb98747-4stnn" Oct 01 15:23:42 crc kubenswrapper[4869]: I1001 15:23:42.115412 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7bf874d8bf-rkktm" Oct 01 15:23:42 crc kubenswrapper[4869]: I1001 15:23:42.210412 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/277a7a86-40f9-47c0-9e4c-ec4193086c16-ovsdbserver-sb\") pod \"277a7a86-40f9-47c0-9e4c-ec4193086c16\" (UID: \"277a7a86-40f9-47c0-9e4c-ec4193086c16\") " Oct 01 15:23:42 crc kubenswrapper[4869]: I1001 15:23:42.210583 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-96g85\" (UniqueName: \"kubernetes.io/projected/277a7a86-40f9-47c0-9e4c-ec4193086c16-kube-api-access-96g85\") pod \"277a7a86-40f9-47c0-9e4c-ec4193086c16\" (UID: \"277a7a86-40f9-47c0-9e4c-ec4193086c16\") " Oct 01 15:23:42 crc kubenswrapper[4869]: I1001 15:23:42.210689 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/277a7a86-40f9-47c0-9e4c-ec4193086c16-ovsdbserver-nb\") pod \"277a7a86-40f9-47c0-9e4c-ec4193086c16\" (UID: \"277a7a86-40f9-47c0-9e4c-ec4193086c16\") " Oct 01 15:23:42 crc kubenswrapper[4869]: I1001 15:23:42.210747 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/277a7a86-40f9-47c0-9e4c-ec4193086c16-dns-svc\") pod \"277a7a86-40f9-47c0-9e4c-ec4193086c16\" (UID: \"277a7a86-40f9-47c0-9e4c-ec4193086c16\") " Oct 01 15:23:42 crc kubenswrapper[4869]: I1001 15:23:42.210815 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/277a7a86-40f9-47c0-9e4c-ec4193086c16-config\") pod \"277a7a86-40f9-47c0-9e4c-ec4193086c16\" (UID: \"277a7a86-40f9-47c0-9e4c-ec4193086c16\") " Oct 01 15:23:42 crc kubenswrapper[4869]: I1001 15:23:42.219491 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/277a7a86-40f9-47c0-9e4c-ec4193086c16-kube-api-access-96g85" (OuterVolumeSpecName: "kube-api-access-96g85") pod "277a7a86-40f9-47c0-9e4c-ec4193086c16" (UID: "277a7a86-40f9-47c0-9e4c-ec4193086c16"). InnerVolumeSpecName "kube-api-access-96g85". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:23:42 crc kubenswrapper[4869]: I1001 15:23:42.261132 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/277a7a86-40f9-47c0-9e4c-ec4193086c16-config" (OuterVolumeSpecName: "config") pod "277a7a86-40f9-47c0-9e4c-ec4193086c16" (UID: "277a7a86-40f9-47c0-9e4c-ec4193086c16"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:23:42 crc kubenswrapper[4869]: I1001 15:23:42.262173 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/277a7a86-40f9-47c0-9e4c-ec4193086c16-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "277a7a86-40f9-47c0-9e4c-ec4193086c16" (UID: "277a7a86-40f9-47c0-9e4c-ec4193086c16"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:23:42 crc kubenswrapper[4869]: I1001 15:23:42.277351 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/277a7a86-40f9-47c0-9e4c-ec4193086c16-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "277a7a86-40f9-47c0-9e4c-ec4193086c16" (UID: "277a7a86-40f9-47c0-9e4c-ec4193086c16"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:23:42 crc kubenswrapper[4869]: I1001 15:23:42.279855 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/277a7a86-40f9-47c0-9e4c-ec4193086c16-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "277a7a86-40f9-47c0-9e4c-ec4193086c16" (UID: "277a7a86-40f9-47c0-9e4c-ec4193086c16"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:23:42 crc kubenswrapper[4869]: I1001 15:23:42.312507 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-96g85\" (UniqueName: \"kubernetes.io/projected/277a7a86-40f9-47c0-9e4c-ec4193086c16-kube-api-access-96g85\") on node \"crc\" DevicePath \"\"" Oct 01 15:23:42 crc kubenswrapper[4869]: I1001 15:23:42.312740 4869 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/277a7a86-40f9-47c0-9e4c-ec4193086c16-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 01 15:23:42 crc kubenswrapper[4869]: I1001 15:23:42.312773 4869 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/277a7a86-40f9-47c0-9e4c-ec4193086c16-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 01 15:23:42 crc kubenswrapper[4869]: I1001 15:23:42.312783 4869 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/277a7a86-40f9-47c0-9e4c-ec4193086c16-config\") on node \"crc\" DevicePath \"\"" Oct 01 15:23:42 crc kubenswrapper[4869]: I1001 15:23:42.312792 4869 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/277a7a86-40f9-47c0-9e4c-ec4193086c16-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 01 15:23:42 crc kubenswrapper[4869]: I1001 15:23:42.370208 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6fcdb98747-4stnn" Oct 01 15:23:42 crc kubenswrapper[4869]: I1001 15:23:42.370301 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6fcdb98747-4stnn" event={"ID":"277a7a86-40f9-47c0-9e4c-ec4193086c16","Type":"ContainerDied","Data":"1a3f91626d055dedf37348ce45e16bf1c2049c8fe2fac5e3992abc3ca716c955"} Oct 01 15:23:42 crc kubenswrapper[4869]: I1001 15:23:42.370390 4869 scope.go:117] "RemoveContainer" containerID="1a3f91626d055dedf37348ce45e16bf1c2049c8fe2fac5e3992abc3ca716c955" Oct 01 15:23:42 crc kubenswrapper[4869]: I1001 15:23:42.372571 4869 generic.go:334] "Generic (PLEG): container finished" podID="277a7a86-40f9-47c0-9e4c-ec4193086c16" containerID="1a3f91626d055dedf37348ce45e16bf1c2049c8fe2fac5e3992abc3ca716c955" exitCode=0 Oct 01 15:23:42 crc kubenswrapper[4869]: I1001 15:23:42.372638 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6fcdb98747-4stnn" event={"ID":"277a7a86-40f9-47c0-9e4c-ec4193086c16","Type":"ContainerDied","Data":"3142a136fbf6f4ab7d68df31aa779c0863f02faac64123d6b3237a05e009f513"} Oct 01 15:23:42 crc kubenswrapper[4869]: I1001 15:23:42.396224 4869 scope.go:117] "RemoveContainer" containerID="f6938aa5d14f7cdf56fb6aef8d272222cf0f1d583b1e47fad583a837789368a3" Oct 01 15:23:42 crc kubenswrapper[4869]: I1001 15:23:42.413880 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6fcdb98747-4stnn"] Oct 01 15:23:42 crc kubenswrapper[4869]: I1001 15:23:42.421862 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6fcdb98747-4stnn"] Oct 01 15:23:42 crc kubenswrapper[4869]: I1001 15:23:42.432288 4869 scope.go:117] "RemoveContainer" containerID="1a3f91626d055dedf37348ce45e16bf1c2049c8fe2fac5e3992abc3ca716c955" Oct 01 15:23:42 crc kubenswrapper[4869]: E1001 15:23:42.432689 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1a3f91626d055dedf37348ce45e16bf1c2049c8fe2fac5e3992abc3ca716c955\": container with ID starting with 1a3f91626d055dedf37348ce45e16bf1c2049c8fe2fac5e3992abc3ca716c955 not found: ID does not exist" containerID="1a3f91626d055dedf37348ce45e16bf1c2049c8fe2fac5e3992abc3ca716c955" Oct 01 15:23:42 crc kubenswrapper[4869]: I1001 15:23:42.432722 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1a3f91626d055dedf37348ce45e16bf1c2049c8fe2fac5e3992abc3ca716c955"} err="failed to get container status \"1a3f91626d055dedf37348ce45e16bf1c2049c8fe2fac5e3992abc3ca716c955\": rpc error: code = NotFound desc = could not find container \"1a3f91626d055dedf37348ce45e16bf1c2049c8fe2fac5e3992abc3ca716c955\": container with ID starting with 1a3f91626d055dedf37348ce45e16bf1c2049c8fe2fac5e3992abc3ca716c955 not found: ID does not exist" Oct 01 15:23:42 crc kubenswrapper[4869]: I1001 15:23:42.432743 4869 scope.go:117] "RemoveContainer" containerID="f6938aa5d14f7cdf56fb6aef8d272222cf0f1d583b1e47fad583a837789368a3" Oct 01 15:23:42 crc kubenswrapper[4869]: E1001 15:23:42.433182 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f6938aa5d14f7cdf56fb6aef8d272222cf0f1d583b1e47fad583a837789368a3\": container with ID starting with f6938aa5d14f7cdf56fb6aef8d272222cf0f1d583b1e47fad583a837789368a3 not found: ID does not exist" containerID="f6938aa5d14f7cdf56fb6aef8d272222cf0f1d583b1e47fad583a837789368a3" Oct 01 15:23:42 crc 
kubenswrapper[4869]: I1001 15:23:42.433224 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f6938aa5d14f7cdf56fb6aef8d272222cf0f1d583b1e47fad583a837789368a3"} err="failed to get container status \"f6938aa5d14f7cdf56fb6aef8d272222cf0f1d583b1e47fad583a837789368a3\": rpc error: code = NotFound desc = could not find container \"f6938aa5d14f7cdf56fb6aef8d272222cf0f1d583b1e47fad583a837789368a3\": container with ID starting with f6938aa5d14f7cdf56fb6aef8d272222cf0f1d583b1e47fad583a837789368a3 not found: ID does not exist" Oct 01 15:23:42 crc kubenswrapper[4869]: I1001 15:23:42.551182 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7bf874d8bf-rkktm"] Oct 01 15:23:42 crc kubenswrapper[4869]: W1001 15:23:42.555908 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2cc0314c_8edd_431b_a31a_3a2355225d9a.slice/crio-ff41c40af4a73d155ef781b7e581143bcbb70a222310fca3437e85e698151452 WatchSource:0}: Error finding container ff41c40af4a73d155ef781b7e581143bcbb70a222310fca3437e85e698151452: Status 404 returned error can't find the container with id ff41c40af4a73d155ef781b7e581143bcbb70a222310fca3437e85e698151452 Oct 01 15:23:43 crc kubenswrapper[4869]: I1001 15:23:43.403837 4869 generic.go:334] "Generic (PLEG): container finished" podID="2cc0314c-8edd-431b-a31a-3a2355225d9a" containerID="6f072b717d909b6290eb6a50be0e8b7870d72a18290c8ab18b68b7165afd3d6b" exitCode=0 Oct 01 15:23:43 crc kubenswrapper[4869]: I1001 15:23:43.403968 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7bf874d8bf-rkktm" event={"ID":"2cc0314c-8edd-431b-a31a-3a2355225d9a","Type":"ContainerDied","Data":"6f072b717d909b6290eb6a50be0e8b7870d72a18290c8ab18b68b7165afd3d6b"} Oct 01 15:23:43 crc kubenswrapper[4869]: I1001 15:23:43.404374 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7bf874d8bf-rkktm" event={"ID":"2cc0314c-8edd-431b-a31a-3a2355225d9a","Type":"ContainerStarted","Data":"ff41c40af4a73d155ef781b7e581143bcbb70a222310fca3437e85e698151452"} Oct 01 15:23:43 crc kubenswrapper[4869]: I1001 15:23:43.592537 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="277a7a86-40f9-47c0-9e4c-ec4193086c16" path="/var/lib/kubelet/pods/277a7a86-40f9-47c0-9e4c-ec4193086c16/volumes" Oct 01 15:23:44 crc kubenswrapper[4869]: I1001 15:23:44.435190 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7bf874d8bf-rkktm" event={"ID":"2cc0314c-8edd-431b-a31a-3a2355225d9a","Type":"ContainerStarted","Data":"c2c55fa28ea9e8a075a1d20574098592b5ec9e0bf1e20177b10b853ba95acb98"} Oct 01 15:23:44 crc kubenswrapper[4869]: I1001 15:23:44.435761 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7bf874d8bf-rkktm" Oct 01 15:23:52 crc kubenswrapper[4869]: I1001 15:23:52.116506 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-7bf874d8bf-rkktm" Oct 01 15:23:52 crc kubenswrapper[4869]: I1001 15:23:52.150177 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7bf874d8bf-rkktm" podStartSLOduration=11.150158433 podStartE2EDuration="11.150158433s" podCreationTimestamp="2025-10-01 15:23:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:23:44.4698913 +0000 UTC 
m=+1133.616734476" watchObservedRunningTime="2025-10-01 15:23:52.150158433 +0000 UTC m=+1141.297001549" Oct 01 15:23:52 crc kubenswrapper[4869]: I1001 15:23:52.205514 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5bfdb7854c-6bc4b"] Oct 01 15:23:52 crc kubenswrapper[4869]: I1001 15:23:52.205832 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5bfdb7854c-6bc4b" podUID="a2dce293-f4ab-4f01-a61d-ae6f4610ca09" containerName="dnsmasq-dns" containerID="cri-o://527837c093e600f7783a814bd8ee2e5faac1b789298c86eea9dd0cd2379da62b" gracePeriod=10 Oct 01 15:23:52 crc kubenswrapper[4869]: I1001 15:23:52.546482 4869 generic.go:334] "Generic (PLEG): container finished" podID="a2dce293-f4ab-4f01-a61d-ae6f4610ca09" containerID="527837c093e600f7783a814bd8ee2e5faac1b789298c86eea9dd0cd2379da62b" exitCode=0 Oct 01 15:23:52 crc kubenswrapper[4869]: I1001 15:23:52.546552 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bfdb7854c-6bc4b" event={"ID":"a2dce293-f4ab-4f01-a61d-ae6f4610ca09","Type":"ContainerDied","Data":"527837c093e600f7783a814bd8ee2e5faac1b789298c86eea9dd0cd2379da62b"} Oct 01 15:23:52 crc kubenswrapper[4869]: I1001 15:23:52.673907 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5bfdb7854c-6bc4b" Oct 01 15:23:52 crc kubenswrapper[4869]: I1001 15:23:52.832481 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a2dce293-f4ab-4f01-a61d-ae6f4610ca09-config\") pod \"a2dce293-f4ab-4f01-a61d-ae6f4610ca09\" (UID: \"a2dce293-f4ab-4f01-a61d-ae6f4610ca09\") " Oct 01 15:23:52 crc kubenswrapper[4869]: I1001 15:23:52.833099 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/a2dce293-f4ab-4f01-a61d-ae6f4610ca09-openstack-edpm-ipam\") pod \"a2dce293-f4ab-4f01-a61d-ae6f4610ca09\" (UID: \"a2dce293-f4ab-4f01-a61d-ae6f4610ca09\") " Oct 01 15:23:52 crc kubenswrapper[4869]: I1001 15:23:52.833166 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a2dce293-f4ab-4f01-a61d-ae6f4610ca09-ovsdbserver-sb\") pod \"a2dce293-f4ab-4f01-a61d-ae6f4610ca09\" (UID: \"a2dce293-f4ab-4f01-a61d-ae6f4610ca09\") " Oct 01 15:23:52 crc kubenswrapper[4869]: I1001 15:23:52.833352 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a2dce293-f4ab-4f01-a61d-ae6f4610ca09-dns-svc\") pod \"a2dce293-f4ab-4f01-a61d-ae6f4610ca09\" (UID: \"a2dce293-f4ab-4f01-a61d-ae6f4610ca09\") " Oct 01 15:23:52 crc kubenswrapper[4869]: I1001 15:23:52.833475 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7v4hn\" (UniqueName: \"kubernetes.io/projected/a2dce293-f4ab-4f01-a61d-ae6f4610ca09-kube-api-access-7v4hn\") pod \"a2dce293-f4ab-4f01-a61d-ae6f4610ca09\" (UID: \"a2dce293-f4ab-4f01-a61d-ae6f4610ca09\") " Oct 01 15:23:52 crc kubenswrapper[4869]: I1001 15:23:52.833528 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a2dce293-f4ab-4f01-a61d-ae6f4610ca09-ovsdbserver-nb\") pod \"a2dce293-f4ab-4f01-a61d-ae6f4610ca09\" (UID: \"a2dce293-f4ab-4f01-a61d-ae6f4610ca09\") " Oct 01 15:23:52 crc kubenswrapper[4869]: I1001 
15:23:52.839635 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a2dce293-f4ab-4f01-a61d-ae6f4610ca09-kube-api-access-7v4hn" (OuterVolumeSpecName: "kube-api-access-7v4hn") pod "a2dce293-f4ab-4f01-a61d-ae6f4610ca09" (UID: "a2dce293-f4ab-4f01-a61d-ae6f4610ca09"). InnerVolumeSpecName "kube-api-access-7v4hn". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:23:52 crc kubenswrapper[4869]: I1001 15:23:52.885978 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a2dce293-f4ab-4f01-a61d-ae6f4610ca09-config" (OuterVolumeSpecName: "config") pod "a2dce293-f4ab-4f01-a61d-ae6f4610ca09" (UID: "a2dce293-f4ab-4f01-a61d-ae6f4610ca09"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:23:52 crc kubenswrapper[4869]: I1001 15:23:52.903219 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a2dce293-f4ab-4f01-a61d-ae6f4610ca09-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "a2dce293-f4ab-4f01-a61d-ae6f4610ca09" (UID: "a2dce293-f4ab-4f01-a61d-ae6f4610ca09"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:23:52 crc kubenswrapper[4869]: I1001 15:23:52.904135 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a2dce293-f4ab-4f01-a61d-ae6f4610ca09-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "a2dce293-f4ab-4f01-a61d-ae6f4610ca09" (UID: "a2dce293-f4ab-4f01-a61d-ae6f4610ca09"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:23:52 crc kubenswrapper[4869]: I1001 15:23:52.905566 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a2dce293-f4ab-4f01-a61d-ae6f4610ca09-openstack-edpm-ipam" (OuterVolumeSpecName: "openstack-edpm-ipam") pod "a2dce293-f4ab-4f01-a61d-ae6f4610ca09" (UID: "a2dce293-f4ab-4f01-a61d-ae6f4610ca09"). InnerVolumeSpecName "openstack-edpm-ipam". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:23:52 crc kubenswrapper[4869]: I1001 15:23:52.907025 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a2dce293-f4ab-4f01-a61d-ae6f4610ca09-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "a2dce293-f4ab-4f01-a61d-ae6f4610ca09" (UID: "a2dce293-f4ab-4f01-a61d-ae6f4610ca09"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:23:52 crc kubenswrapper[4869]: I1001 15:23:52.936036 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7v4hn\" (UniqueName: \"kubernetes.io/projected/a2dce293-f4ab-4f01-a61d-ae6f4610ca09-kube-api-access-7v4hn\") on node \"crc\" DevicePath \"\"" Oct 01 15:23:52 crc kubenswrapper[4869]: I1001 15:23:52.936075 4869 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a2dce293-f4ab-4f01-a61d-ae6f4610ca09-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 01 15:23:52 crc kubenswrapper[4869]: I1001 15:23:52.936085 4869 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a2dce293-f4ab-4f01-a61d-ae6f4610ca09-config\") on node \"crc\" DevicePath \"\"" Oct 01 15:23:52 crc kubenswrapper[4869]: I1001 15:23:52.936096 4869 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/a2dce293-f4ab-4f01-a61d-ae6f4610ca09-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Oct 01 15:23:52 crc kubenswrapper[4869]: I1001 15:23:52.936104 4869 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a2dce293-f4ab-4f01-a61d-ae6f4610ca09-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 01 15:23:52 crc kubenswrapper[4869]: I1001 15:23:52.936112 4869 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a2dce293-f4ab-4f01-a61d-ae6f4610ca09-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 01 15:23:53 crc kubenswrapper[4869]: I1001 15:23:53.561721 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bfdb7854c-6bc4b" event={"ID":"a2dce293-f4ab-4f01-a61d-ae6f4610ca09","Type":"ContainerDied","Data":"d3825971c299967bcbde962411a837b648496a08ca25febdc70421727e9c6c7e"} Oct 01 15:23:53 crc kubenswrapper[4869]: I1001 15:23:53.561811 4869 scope.go:117] "RemoveContainer" containerID="527837c093e600f7783a814bd8ee2e5faac1b789298c86eea9dd0cd2379da62b" Oct 01 15:23:53 crc kubenswrapper[4869]: I1001 15:23:53.562504 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5bfdb7854c-6bc4b" Oct 01 15:23:53 crc kubenswrapper[4869]: I1001 15:23:53.608052 4869 scope.go:117] "RemoveContainer" containerID="f13e01ac03210d3686a8759d25bbab671d7a189763e709c5d3078b02bacab6cf" Oct 01 15:23:53 crc kubenswrapper[4869]: I1001 15:23:53.627775 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5bfdb7854c-6bc4b"] Oct 01 15:23:53 crc kubenswrapper[4869]: I1001 15:23:53.637491 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5bfdb7854c-6bc4b"] Oct 01 15:23:55 crc kubenswrapper[4869]: I1001 15:23:55.602247 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a2dce293-f4ab-4f01-a61d-ae6f4610ca09" path="/var/lib/kubelet/pods/a2dce293-f4ab-4f01-a61d-ae6f4610ca09/volumes" Oct 01 15:24:02 crc kubenswrapper[4869]: I1001 15:24:02.305377 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xn8rv"] Oct 01 15:24:02 crc kubenswrapper[4869]: E1001 15:24:02.306235 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="277a7a86-40f9-47c0-9e4c-ec4193086c16" containerName="init" Oct 01 15:24:02 crc kubenswrapper[4869]: I1001 15:24:02.306252 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="277a7a86-40f9-47c0-9e4c-ec4193086c16" containerName="init" Oct 01 15:24:02 crc kubenswrapper[4869]: E1001 15:24:02.306287 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a2dce293-f4ab-4f01-a61d-ae6f4610ca09" containerName="dnsmasq-dns" Oct 01 15:24:02 crc kubenswrapper[4869]: I1001 15:24:02.306296 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="a2dce293-f4ab-4f01-a61d-ae6f4610ca09" containerName="dnsmasq-dns" Oct 01 15:24:02 crc kubenswrapper[4869]: E1001 15:24:02.306327 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="277a7a86-40f9-47c0-9e4c-ec4193086c16" containerName="dnsmasq-dns" Oct 01 15:24:02 crc kubenswrapper[4869]: I1001 15:24:02.306337 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="277a7a86-40f9-47c0-9e4c-ec4193086c16" containerName="dnsmasq-dns" Oct 01 15:24:02 crc kubenswrapper[4869]: E1001 15:24:02.306349 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a2dce293-f4ab-4f01-a61d-ae6f4610ca09" containerName="init" Oct 01 15:24:02 crc kubenswrapper[4869]: I1001 15:24:02.306356 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="a2dce293-f4ab-4f01-a61d-ae6f4610ca09" containerName="init" Oct 01 15:24:02 crc kubenswrapper[4869]: I1001 15:24:02.306928 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="a2dce293-f4ab-4f01-a61d-ae6f4610ca09" containerName="dnsmasq-dns" Oct 01 15:24:02 crc kubenswrapper[4869]: I1001 15:24:02.306991 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="277a7a86-40f9-47c0-9e4c-ec4193086c16" containerName="dnsmasq-dns" Oct 01 15:24:02 crc kubenswrapper[4869]: I1001 15:24:02.308450 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xn8rv" Oct 01 15:24:02 crc kubenswrapper[4869]: I1001 15:24:02.318760 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 01 15:24:02 crc kubenswrapper[4869]: I1001 15:24:02.318915 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-cjg8g" Oct 01 15:24:02 crc kubenswrapper[4869]: I1001 15:24:02.319055 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 01 15:24:02 crc kubenswrapper[4869]: I1001 15:24:02.319467 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 01 15:24:02 crc kubenswrapper[4869]: I1001 15:24:02.325771 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xn8rv"] Oct 01 15:24:02 crc kubenswrapper[4869]: I1001 15:24:02.430277 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/199eb3c4-6ff2-4910-9fe3-51a68f736017-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-xn8rv\" (UID: \"199eb3c4-6ff2-4910-9fe3-51a68f736017\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xn8rv" Oct 01 15:24:02 crc kubenswrapper[4869]: I1001 15:24:02.430363 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nzrhs\" (UniqueName: \"kubernetes.io/projected/199eb3c4-6ff2-4910-9fe3-51a68f736017-kube-api-access-nzrhs\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-xn8rv\" (UID: \"199eb3c4-6ff2-4910-9fe3-51a68f736017\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xn8rv" Oct 01 15:24:02 crc kubenswrapper[4869]: I1001 15:24:02.430479 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/199eb3c4-6ff2-4910-9fe3-51a68f736017-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-xn8rv\" (UID: \"199eb3c4-6ff2-4910-9fe3-51a68f736017\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xn8rv" Oct 01 15:24:02 crc kubenswrapper[4869]: I1001 15:24:02.430535 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/199eb3c4-6ff2-4910-9fe3-51a68f736017-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-xn8rv\" (UID: \"199eb3c4-6ff2-4910-9fe3-51a68f736017\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xn8rv" Oct 01 15:24:02 crc kubenswrapper[4869]: I1001 15:24:02.531983 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/199eb3c4-6ff2-4910-9fe3-51a68f736017-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-xn8rv\" (UID: \"199eb3c4-6ff2-4910-9fe3-51a68f736017\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xn8rv" Oct 01 15:24:02 crc kubenswrapper[4869]: I1001 15:24:02.532122 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/199eb3c4-6ff2-4910-9fe3-51a68f736017-ssh-key\") pod 
\"repo-setup-edpm-deployment-openstack-edpm-ipam-xn8rv\" (UID: \"199eb3c4-6ff2-4910-9fe3-51a68f736017\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xn8rv" Oct 01 15:24:02 crc kubenswrapper[4869]: I1001 15:24:02.532209 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/199eb3c4-6ff2-4910-9fe3-51a68f736017-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-xn8rv\" (UID: \"199eb3c4-6ff2-4910-9fe3-51a68f736017\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xn8rv" Oct 01 15:24:02 crc kubenswrapper[4869]: I1001 15:24:02.532373 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nzrhs\" (UniqueName: \"kubernetes.io/projected/199eb3c4-6ff2-4910-9fe3-51a68f736017-kube-api-access-nzrhs\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-xn8rv\" (UID: \"199eb3c4-6ff2-4910-9fe3-51a68f736017\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xn8rv" Oct 01 15:24:02 crc kubenswrapper[4869]: I1001 15:24:02.541837 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/199eb3c4-6ff2-4910-9fe3-51a68f736017-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-xn8rv\" (UID: \"199eb3c4-6ff2-4910-9fe3-51a68f736017\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xn8rv" Oct 01 15:24:02 crc kubenswrapper[4869]: I1001 15:24:02.542350 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/199eb3c4-6ff2-4910-9fe3-51a68f736017-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-xn8rv\" (UID: \"199eb3c4-6ff2-4910-9fe3-51a68f736017\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xn8rv" Oct 01 15:24:02 crc kubenswrapper[4869]: I1001 15:24:02.544331 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/199eb3c4-6ff2-4910-9fe3-51a68f736017-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-xn8rv\" (UID: \"199eb3c4-6ff2-4910-9fe3-51a68f736017\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xn8rv" Oct 01 15:24:02 crc kubenswrapper[4869]: I1001 15:24:02.554845 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nzrhs\" (UniqueName: \"kubernetes.io/projected/199eb3c4-6ff2-4910-9fe3-51a68f736017-kube-api-access-nzrhs\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-xn8rv\" (UID: \"199eb3c4-6ff2-4910-9fe3-51a68f736017\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xn8rv" Oct 01 15:24:02 crc kubenswrapper[4869]: I1001 15:24:02.624712 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xn8rv" Oct 01 15:24:02 crc kubenswrapper[4869]: I1001 15:24:02.673722 4869 generic.go:334] "Generic (PLEG): container finished" podID="e2d4e177-3ee6-497a-a0c6-db9305809a81" containerID="fc212ca76b639e00ea7368fe380ebbc7c8ef4688715551d05f6bdea1604eefbc" exitCode=0 Oct 01 15:24:02 crc kubenswrapper[4869]: I1001 15:24:02.673764 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"e2d4e177-3ee6-497a-a0c6-db9305809a81","Type":"ContainerDied","Data":"fc212ca76b639e00ea7368fe380ebbc7c8ef4688715551d05f6bdea1604eefbc"} Oct 01 15:24:03 crc kubenswrapper[4869]: I1001 15:24:03.190487 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xn8rv"] Oct 01 15:24:03 crc kubenswrapper[4869]: W1001 15:24:03.194503 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod199eb3c4_6ff2_4910_9fe3_51a68f736017.slice/crio-0d8c0b440bd154582874722e43406e27a75bb8a117876c983c03a51dbaac8786 WatchSource:0}: Error finding container 0d8c0b440bd154582874722e43406e27a75bb8a117876c983c03a51dbaac8786: Status 404 returned error can't find the container with id 0d8c0b440bd154582874722e43406e27a75bb8a117876c983c03a51dbaac8786 Oct 01 15:24:03 crc kubenswrapper[4869]: I1001 15:24:03.197328 4869 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 01 15:24:03 crc kubenswrapper[4869]: I1001 15:24:03.693778 4869 generic.go:334] "Generic (PLEG): container finished" podID="70946011-083d-41f8-acf9-ab0c4711b48b" containerID="04578113632a59052138892123b9e2414104a170f9f57dd921dd4ccdf26512dc" exitCode=0 Oct 01 15:24:03 crc kubenswrapper[4869]: I1001 15:24:03.693891 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"70946011-083d-41f8-acf9-ab0c4711b48b","Type":"ContainerDied","Data":"04578113632a59052138892123b9e2414104a170f9f57dd921dd4ccdf26512dc"} Oct 01 15:24:03 crc kubenswrapper[4869]: I1001 15:24:03.696240 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xn8rv" event={"ID":"199eb3c4-6ff2-4910-9fe3-51a68f736017","Type":"ContainerStarted","Data":"0d8c0b440bd154582874722e43406e27a75bb8a117876c983c03a51dbaac8786"} Oct 01 15:24:03 crc kubenswrapper[4869]: I1001 15:24:03.701025 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"e2d4e177-3ee6-497a-a0c6-db9305809a81","Type":"ContainerStarted","Data":"19d77c0ddcd2a44a68d2bb102bdc707c256f12efcf9ab84677968fb110c4493f"} Oct 01 15:24:03 crc kubenswrapper[4869]: I1001 15:24:03.701412 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Oct 01 15:24:04 crc kubenswrapper[4869]: I1001 15:24:04.736540 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"70946011-083d-41f8-acf9-ab0c4711b48b","Type":"ContainerStarted","Data":"ae6da651f3918a1b0cf29764560fa62c56f383ae3b5b499d1ee092c591935b79"} Oct 01 15:24:04 crc kubenswrapper[4869]: I1001 15:24:04.737547 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Oct 01 15:24:04 crc kubenswrapper[4869]: I1001 15:24:04.780775 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack/rabbitmq-server-0" podStartSLOduration=37.780749259 podStartE2EDuration="37.780749259s" podCreationTimestamp="2025-10-01 15:23:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:24:03.758439107 +0000 UTC m=+1152.905282243" watchObservedRunningTime="2025-10-01 15:24:04.780749259 +0000 UTC m=+1153.927592375" Oct 01 15:24:11 crc kubenswrapper[4869]: I1001 15:24:11.613701 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=43.613685421 podStartE2EDuration="43.613685421s" podCreationTimestamp="2025-10-01 15:23:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:24:04.774097281 +0000 UTC m=+1153.920940437" watchObservedRunningTime="2025-10-01 15:24:11.613685421 +0000 UTC m=+1160.760528537" Oct 01 15:24:12 crc kubenswrapper[4869]: I1001 15:24:12.834897 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xn8rv" event={"ID":"199eb3c4-6ff2-4910-9fe3-51a68f736017","Type":"ContainerStarted","Data":"1ec0c0c6e1592364c174b2f0559ef58b726ce12d4b566f8be96b2cdb9253fdd1"} Oct 01 15:24:12 crc kubenswrapper[4869]: I1001 15:24:12.855681 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xn8rv" podStartSLOduration=2.250391869 podStartE2EDuration="10.85566181s" podCreationTimestamp="2025-10-01 15:24:02 +0000 UTC" firstStartedPulling="2025-10-01 15:24:03.197081766 +0000 UTC m=+1152.343924882" lastFinishedPulling="2025-10-01 15:24:11.802351707 +0000 UTC m=+1160.949194823" observedRunningTime="2025-10-01 15:24:12.854716626 +0000 UTC m=+1162.001559782" watchObservedRunningTime="2025-10-01 15:24:12.85566181 +0000 UTC m=+1162.002504926" Oct 01 15:24:17 crc kubenswrapper[4869]: I1001 15:24:17.669582 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Oct 01 15:24:18 crc kubenswrapper[4869]: I1001 15:24:18.683963 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Oct 01 15:24:23 crc kubenswrapper[4869]: I1001 15:24:23.963564 4869 generic.go:334] "Generic (PLEG): container finished" podID="199eb3c4-6ff2-4910-9fe3-51a68f736017" containerID="1ec0c0c6e1592364c174b2f0559ef58b726ce12d4b566f8be96b2cdb9253fdd1" exitCode=0 Oct 01 15:24:23 crc kubenswrapper[4869]: I1001 15:24:23.963683 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xn8rv" event={"ID":"199eb3c4-6ff2-4910-9fe3-51a68f736017","Type":"ContainerDied","Data":"1ec0c0c6e1592364c174b2f0559ef58b726ce12d4b566f8be96b2cdb9253fdd1"} Oct 01 15:24:25 crc kubenswrapper[4869]: I1001 15:24:25.442259 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xn8rv" Oct 01 15:24:25 crc kubenswrapper[4869]: I1001 15:24:25.488497 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/199eb3c4-6ff2-4910-9fe3-51a68f736017-ssh-key\") pod \"199eb3c4-6ff2-4910-9fe3-51a68f736017\" (UID: \"199eb3c4-6ff2-4910-9fe3-51a68f736017\") " Oct 01 15:24:25 crc kubenswrapper[4869]: I1001 15:24:25.488666 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/199eb3c4-6ff2-4910-9fe3-51a68f736017-repo-setup-combined-ca-bundle\") pod \"199eb3c4-6ff2-4910-9fe3-51a68f736017\" (UID: \"199eb3c4-6ff2-4910-9fe3-51a68f736017\") " Oct 01 15:24:25 crc kubenswrapper[4869]: I1001 15:24:25.488695 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/199eb3c4-6ff2-4910-9fe3-51a68f736017-inventory\") pod \"199eb3c4-6ff2-4910-9fe3-51a68f736017\" (UID: \"199eb3c4-6ff2-4910-9fe3-51a68f736017\") " Oct 01 15:24:25 crc kubenswrapper[4869]: I1001 15:24:25.488770 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzrhs\" (UniqueName: \"kubernetes.io/projected/199eb3c4-6ff2-4910-9fe3-51a68f736017-kube-api-access-nzrhs\") pod \"199eb3c4-6ff2-4910-9fe3-51a68f736017\" (UID: \"199eb3c4-6ff2-4910-9fe3-51a68f736017\") " Oct 01 15:24:25 crc kubenswrapper[4869]: I1001 15:24:25.495428 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/199eb3c4-6ff2-4910-9fe3-51a68f736017-kube-api-access-nzrhs" (OuterVolumeSpecName: "kube-api-access-nzrhs") pod "199eb3c4-6ff2-4910-9fe3-51a68f736017" (UID: "199eb3c4-6ff2-4910-9fe3-51a68f736017"). InnerVolumeSpecName "kube-api-access-nzrhs". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:24:25 crc kubenswrapper[4869]: I1001 15:24:25.495828 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/199eb3c4-6ff2-4910-9fe3-51a68f736017-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "199eb3c4-6ff2-4910-9fe3-51a68f736017" (UID: "199eb3c4-6ff2-4910-9fe3-51a68f736017"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:24:25 crc kubenswrapper[4869]: I1001 15:24:25.525038 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/199eb3c4-6ff2-4910-9fe3-51a68f736017-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "199eb3c4-6ff2-4910-9fe3-51a68f736017" (UID: "199eb3c4-6ff2-4910-9fe3-51a68f736017"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:24:25 crc kubenswrapper[4869]: I1001 15:24:25.543539 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/199eb3c4-6ff2-4910-9fe3-51a68f736017-inventory" (OuterVolumeSpecName: "inventory") pod "199eb3c4-6ff2-4910-9fe3-51a68f736017" (UID: "199eb3c4-6ff2-4910-9fe3-51a68f736017"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:24:25 crc kubenswrapper[4869]: I1001 15:24:25.590750 4869 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/199eb3c4-6ff2-4910-9fe3-51a68f736017-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 01 15:24:25 crc kubenswrapper[4869]: I1001 15:24:25.590996 4869 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/199eb3c4-6ff2-4910-9fe3-51a68f736017-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 15:24:25 crc kubenswrapper[4869]: I1001 15:24:25.591133 4869 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/199eb3c4-6ff2-4910-9fe3-51a68f736017-inventory\") on node \"crc\" DevicePath \"\"" Oct 01 15:24:25 crc kubenswrapper[4869]: I1001 15:24:25.593401 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzrhs\" (UniqueName: \"kubernetes.io/projected/199eb3c4-6ff2-4910-9fe3-51a68f736017-kube-api-access-nzrhs\") on node \"crc\" DevicePath \"\"" Oct 01 15:24:25 crc kubenswrapper[4869]: I1001 15:24:25.993138 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xn8rv" event={"ID":"199eb3c4-6ff2-4910-9fe3-51a68f736017","Type":"ContainerDied","Data":"0d8c0b440bd154582874722e43406e27a75bb8a117876c983c03a51dbaac8786"} Oct 01 15:24:25 crc kubenswrapper[4869]: I1001 15:24:25.993878 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0d8c0b440bd154582874722e43406e27a75bb8a117876c983c03a51dbaac8786" Oct 01 15:24:25 crc kubenswrapper[4869]: I1001 15:24:25.993229 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xn8rv" Oct 01 15:24:26 crc kubenswrapper[4869]: I1001 15:24:26.103171 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-f8mtp"] Oct 01 15:24:26 crc kubenswrapper[4869]: E1001 15:24:26.103591 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="199eb3c4-6ff2-4910-9fe3-51a68f736017" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Oct 01 15:24:26 crc kubenswrapper[4869]: I1001 15:24:26.103608 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="199eb3c4-6ff2-4910-9fe3-51a68f736017" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Oct 01 15:24:26 crc kubenswrapper[4869]: I1001 15:24:26.103806 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="199eb3c4-6ff2-4910-9fe3-51a68f736017" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Oct 01 15:24:26 crc kubenswrapper[4869]: I1001 15:24:26.104512 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-f8mtp" Oct 01 15:24:26 crc kubenswrapper[4869]: I1001 15:24:26.106362 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 01 15:24:26 crc kubenswrapper[4869]: I1001 15:24:26.107025 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 01 15:24:26 crc kubenswrapper[4869]: I1001 15:24:26.107398 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-cjg8g" Oct 01 15:24:26 crc kubenswrapper[4869]: I1001 15:24:26.107611 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 01 15:24:26 crc kubenswrapper[4869]: I1001 15:24:26.113290 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-f8mtp"] Oct 01 15:24:26 crc kubenswrapper[4869]: I1001 15:24:26.207671 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/12855739-df17-48b6-886b-469e6a39d7f8-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-f8mtp\" (UID: \"12855739-df17-48b6-886b-469e6a39d7f8\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-f8mtp" Oct 01 15:24:26 crc kubenswrapper[4869]: I1001 15:24:26.207787 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/12855739-df17-48b6-886b-469e6a39d7f8-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-f8mtp\" (UID: \"12855739-df17-48b6-886b-469e6a39d7f8\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-f8mtp" Oct 01 15:24:26 crc kubenswrapper[4869]: I1001 15:24:26.207854 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/12855739-df17-48b6-886b-469e6a39d7f8-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-f8mtp\" (UID: \"12855739-df17-48b6-886b-469e6a39d7f8\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-f8mtp" Oct 01 15:24:26 crc kubenswrapper[4869]: I1001 15:24:26.208154 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6tczd\" (UniqueName: \"kubernetes.io/projected/12855739-df17-48b6-886b-469e6a39d7f8-kube-api-access-6tczd\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-f8mtp\" (UID: \"12855739-df17-48b6-886b-469e6a39d7f8\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-f8mtp" Oct 01 15:24:26 crc kubenswrapper[4869]: I1001 15:24:26.310188 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/12855739-df17-48b6-886b-469e6a39d7f8-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-f8mtp\" (UID: \"12855739-df17-48b6-886b-469e6a39d7f8\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-f8mtp" Oct 01 15:24:26 crc kubenswrapper[4869]: I1001 15:24:26.310307 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/12855739-df17-48b6-886b-469e6a39d7f8-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-f8mtp\" (UID: 
\"12855739-df17-48b6-886b-469e6a39d7f8\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-f8mtp" Oct 01 15:24:26 crc kubenswrapper[4869]: I1001 15:24:26.310387 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6tczd\" (UniqueName: \"kubernetes.io/projected/12855739-df17-48b6-886b-469e6a39d7f8-kube-api-access-6tczd\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-f8mtp\" (UID: \"12855739-df17-48b6-886b-469e6a39d7f8\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-f8mtp" Oct 01 15:24:26 crc kubenswrapper[4869]: I1001 15:24:26.310606 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/12855739-df17-48b6-886b-469e6a39d7f8-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-f8mtp\" (UID: \"12855739-df17-48b6-886b-469e6a39d7f8\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-f8mtp" Oct 01 15:24:26 crc kubenswrapper[4869]: I1001 15:24:26.315277 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/12855739-df17-48b6-886b-469e6a39d7f8-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-f8mtp\" (UID: \"12855739-df17-48b6-886b-469e6a39d7f8\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-f8mtp" Oct 01 15:24:26 crc kubenswrapper[4869]: I1001 15:24:26.315310 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/12855739-df17-48b6-886b-469e6a39d7f8-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-f8mtp\" (UID: \"12855739-df17-48b6-886b-469e6a39d7f8\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-f8mtp" Oct 01 15:24:26 crc kubenswrapper[4869]: I1001 15:24:26.316110 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/12855739-df17-48b6-886b-469e6a39d7f8-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-f8mtp\" (UID: \"12855739-df17-48b6-886b-469e6a39d7f8\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-f8mtp" Oct 01 15:24:26 crc kubenswrapper[4869]: I1001 15:24:26.330468 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6tczd\" (UniqueName: \"kubernetes.io/projected/12855739-df17-48b6-886b-469e6a39d7f8-kube-api-access-6tczd\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-f8mtp\" (UID: \"12855739-df17-48b6-886b-469e6a39d7f8\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-f8mtp" Oct 01 15:24:26 crc kubenswrapper[4869]: I1001 15:24:26.435072 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-f8mtp" Oct 01 15:24:26 crc kubenswrapper[4869]: I1001 15:24:26.833292 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-f8mtp"] Oct 01 15:24:27 crc kubenswrapper[4869]: I1001 15:24:27.003466 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-f8mtp" event={"ID":"12855739-df17-48b6-886b-469e6a39d7f8","Type":"ContainerStarted","Data":"e75712c46fee0cb6e95a4a4784dba49b33fc25401574c7c90d2a587d167e710a"} Oct 01 15:24:28 crc kubenswrapper[4869]: I1001 15:24:28.020895 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-f8mtp" event={"ID":"12855739-df17-48b6-886b-469e6a39d7f8","Type":"ContainerStarted","Data":"0644d998c969fb3bed012dc7a8f2ad9a8e87fcc549ee146354f20323a948af9f"} Oct 01 15:24:28 crc kubenswrapper[4869]: I1001 15:24:28.049201 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-f8mtp" podStartSLOduration=1.481316048 podStartE2EDuration="2.049172573s" podCreationTimestamp="2025-10-01 15:24:26 +0000 UTC" firstStartedPulling="2025-10-01 15:24:26.843828298 +0000 UTC m=+1175.990671424" lastFinishedPulling="2025-10-01 15:24:27.411684843 +0000 UTC m=+1176.558527949" observedRunningTime="2025-10-01 15:24:28.037791326 +0000 UTC m=+1177.184634502" watchObservedRunningTime="2025-10-01 15:24:28.049172573 +0000 UTC m=+1177.196015729" Oct 01 15:25:11 crc kubenswrapper[4869]: I1001 15:25:11.796093 4869 scope.go:117] "RemoveContainer" containerID="261d9771c3e0c08c4b3e742d5b6028f0b71083327f06a52fbf4b771d3c082ac0" Oct 01 15:25:11 crc kubenswrapper[4869]: I1001 15:25:11.825593 4869 scope.go:117] "RemoveContainer" containerID="c9112b6c56879e32b798287e159dea2fc67f0ade0ed0e14364c2cbacb8a4db68" Oct 01 15:25:13 crc kubenswrapper[4869]: I1001 15:25:13.387200 4869 patch_prober.go:28] interesting pod/machine-config-daemon-c86m8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 15:25:13 crc kubenswrapper[4869]: I1001 15:25:13.387857 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 15:25:43 crc kubenswrapper[4869]: I1001 15:25:43.353957 4869 patch_prober.go:28] interesting pod/machine-config-daemon-c86m8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 15:25:43 crc kubenswrapper[4869]: I1001 15:25:43.354662 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 15:26:11 crc kubenswrapper[4869]: I1001 15:26:11.880069 4869 scope.go:117] 
"RemoveContainer" containerID="1f949349f800051a952b2fe8bbf88dacd5fa2e71449eab4d35a9a964ac60e4c6" Oct 01 15:26:11 crc kubenswrapper[4869]: I1001 15:26:11.920140 4869 scope.go:117] "RemoveContainer" containerID="66bcce14f8021da5c860327093bf08a347d2f379c0a2209d540b5e338aeb57d9" Oct 01 15:26:13 crc kubenswrapper[4869]: I1001 15:26:13.354641 4869 patch_prober.go:28] interesting pod/machine-config-daemon-c86m8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 15:26:13 crc kubenswrapper[4869]: I1001 15:26:13.355016 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 15:26:13 crc kubenswrapper[4869]: I1001 15:26:13.355080 4869 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" Oct 01 15:26:13 crc kubenswrapper[4869]: I1001 15:26:13.355954 4869 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"fbafe3c9358f68065a4bc8a44e7f7c6c280ff5086e57618c387c454ac514cc06"} pod="openshift-machine-config-operator/machine-config-daemon-c86m8" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 01 15:26:13 crc kubenswrapper[4869]: I1001 15:26:13.356062 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" containerID="cri-o://fbafe3c9358f68065a4bc8a44e7f7c6c280ff5086e57618c387c454ac514cc06" gracePeriod=600 Oct 01 15:26:14 crc kubenswrapper[4869]: I1001 15:26:14.246311 4869 generic.go:334] "Generic (PLEG): container finished" podID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerID="fbafe3c9358f68065a4bc8a44e7f7c6c280ff5086e57618c387c454ac514cc06" exitCode=0 Oct 01 15:26:14 crc kubenswrapper[4869]: I1001 15:26:14.246326 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" event={"ID":"a4b64f7f-0b03-4f47-965b-9fde048b735c","Type":"ContainerDied","Data":"fbafe3c9358f68065a4bc8a44e7f7c6c280ff5086e57618c387c454ac514cc06"} Oct 01 15:26:14 crc kubenswrapper[4869]: I1001 15:26:14.246818 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" event={"ID":"a4b64f7f-0b03-4f47-965b-9fde048b735c","Type":"ContainerStarted","Data":"1d80d75dc72da971c79fa8b67243b56fe5690fce0a3f0d0147f32ac22b29db29"} Oct 01 15:26:14 crc kubenswrapper[4869]: I1001 15:26:14.246866 4869 scope.go:117] "RemoveContainer" containerID="0674a6010e4abaf43ad0d52524028fcd0e0d167c67609073a8bff51bfec2aabf" Oct 01 15:27:03 crc kubenswrapper[4869]: I1001 15:27:03.843452 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-l5n65"] Oct 01 15:27:03 crc kubenswrapper[4869]: I1001 15:27:03.853195 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-l5n65"] Oct 01 15:27:03 crc kubenswrapper[4869]: I1001 
15:27:03.853343 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-l5n65" Oct 01 15:27:03 crc kubenswrapper[4869]: I1001 15:27:03.976475 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vbc99\" (UniqueName: \"kubernetes.io/projected/66bb0c20-eda1-444e-9dee-61391475b50e-kube-api-access-vbc99\") pod \"redhat-marketplace-l5n65\" (UID: \"66bb0c20-eda1-444e-9dee-61391475b50e\") " pod="openshift-marketplace/redhat-marketplace-l5n65" Oct 01 15:27:03 crc kubenswrapper[4869]: I1001 15:27:03.976525 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/66bb0c20-eda1-444e-9dee-61391475b50e-utilities\") pod \"redhat-marketplace-l5n65\" (UID: \"66bb0c20-eda1-444e-9dee-61391475b50e\") " pod="openshift-marketplace/redhat-marketplace-l5n65" Oct 01 15:27:03 crc kubenswrapper[4869]: I1001 15:27:03.976728 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/66bb0c20-eda1-444e-9dee-61391475b50e-catalog-content\") pod \"redhat-marketplace-l5n65\" (UID: \"66bb0c20-eda1-444e-9dee-61391475b50e\") " pod="openshift-marketplace/redhat-marketplace-l5n65" Oct 01 15:27:04 crc kubenswrapper[4869]: I1001 15:27:04.077743 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vbc99\" (UniqueName: \"kubernetes.io/projected/66bb0c20-eda1-444e-9dee-61391475b50e-kube-api-access-vbc99\") pod \"redhat-marketplace-l5n65\" (UID: \"66bb0c20-eda1-444e-9dee-61391475b50e\") " pod="openshift-marketplace/redhat-marketplace-l5n65" Oct 01 15:27:04 crc kubenswrapper[4869]: I1001 15:27:04.077782 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/66bb0c20-eda1-444e-9dee-61391475b50e-utilities\") pod \"redhat-marketplace-l5n65\" (UID: \"66bb0c20-eda1-444e-9dee-61391475b50e\") " pod="openshift-marketplace/redhat-marketplace-l5n65" Oct 01 15:27:04 crc kubenswrapper[4869]: I1001 15:27:04.077931 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/66bb0c20-eda1-444e-9dee-61391475b50e-catalog-content\") pod \"redhat-marketplace-l5n65\" (UID: \"66bb0c20-eda1-444e-9dee-61391475b50e\") " pod="openshift-marketplace/redhat-marketplace-l5n65" Oct 01 15:27:04 crc kubenswrapper[4869]: I1001 15:27:04.078345 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/66bb0c20-eda1-444e-9dee-61391475b50e-catalog-content\") pod \"redhat-marketplace-l5n65\" (UID: \"66bb0c20-eda1-444e-9dee-61391475b50e\") " pod="openshift-marketplace/redhat-marketplace-l5n65" Oct 01 15:27:04 crc kubenswrapper[4869]: I1001 15:27:04.078500 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/66bb0c20-eda1-444e-9dee-61391475b50e-utilities\") pod \"redhat-marketplace-l5n65\" (UID: \"66bb0c20-eda1-444e-9dee-61391475b50e\") " pod="openshift-marketplace/redhat-marketplace-l5n65" Oct 01 15:27:04 crc kubenswrapper[4869]: I1001 15:27:04.104714 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vbc99\" (UniqueName: 
\"kubernetes.io/projected/66bb0c20-eda1-444e-9dee-61391475b50e-kube-api-access-vbc99\") pod \"redhat-marketplace-l5n65\" (UID: \"66bb0c20-eda1-444e-9dee-61391475b50e\") " pod="openshift-marketplace/redhat-marketplace-l5n65" Oct 01 15:27:04 crc kubenswrapper[4869]: I1001 15:27:04.178148 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-l5n65" Oct 01 15:27:05 crc kubenswrapper[4869]: I1001 15:27:04.615682 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-l5n65"] Oct 01 15:27:05 crc kubenswrapper[4869]: I1001 15:27:04.768995 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-l5n65" event={"ID":"66bb0c20-eda1-444e-9dee-61391475b50e","Type":"ContainerStarted","Data":"a54feae3bec48eeeda76798f990e6aebe150da796f37df8d242f11cc37ae993c"} Oct 01 15:27:05 crc kubenswrapper[4869]: I1001 15:27:05.781620 4869 generic.go:334] "Generic (PLEG): container finished" podID="66bb0c20-eda1-444e-9dee-61391475b50e" containerID="0e350a61f551ad50e095cb83d6a767d0fce7d33df2a805e35650400de819a031" exitCode=0 Oct 01 15:27:05 crc kubenswrapper[4869]: I1001 15:27:05.781691 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-l5n65" event={"ID":"66bb0c20-eda1-444e-9dee-61391475b50e","Type":"ContainerDied","Data":"0e350a61f551ad50e095cb83d6a767d0fce7d33df2a805e35650400de819a031"} Oct 01 15:27:07 crc kubenswrapper[4869]: I1001 15:27:07.803858 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-l5n65" event={"ID":"66bb0c20-eda1-444e-9dee-61391475b50e","Type":"ContainerStarted","Data":"8fe8fb58081796b2eefebd1fb0a52d5f17f5247d539c137fec5161f2093c20d1"} Oct 01 15:27:08 crc kubenswrapper[4869]: I1001 15:27:08.818175 4869 generic.go:334] "Generic (PLEG): container finished" podID="66bb0c20-eda1-444e-9dee-61391475b50e" containerID="8fe8fb58081796b2eefebd1fb0a52d5f17f5247d539c137fec5161f2093c20d1" exitCode=0 Oct 01 15:27:08 crc kubenswrapper[4869]: I1001 15:27:08.818245 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-l5n65" event={"ID":"66bb0c20-eda1-444e-9dee-61391475b50e","Type":"ContainerDied","Data":"8fe8fb58081796b2eefebd1fb0a52d5f17f5247d539c137fec5161f2093c20d1"} Oct 01 15:27:10 crc kubenswrapper[4869]: I1001 15:27:10.837810 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-l5n65" event={"ID":"66bb0c20-eda1-444e-9dee-61391475b50e","Type":"ContainerStarted","Data":"8f0146cdc9671c33409cb1c61cce7d621cfdebcacc05c0ef0318f8da6a151bfd"} Oct 01 15:27:10 crc kubenswrapper[4869]: I1001 15:27:10.859967 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-l5n65" podStartSLOduration=3.810407267 podStartE2EDuration="7.859946427s" podCreationTimestamp="2025-10-01 15:27:03 +0000 UTC" firstStartedPulling="2025-10-01 15:27:05.783887817 +0000 UTC m=+1334.930730943" lastFinishedPulling="2025-10-01 15:27:09.833426987 +0000 UTC m=+1338.980270103" observedRunningTime="2025-10-01 15:27:10.855322711 +0000 UTC m=+1340.002165867" watchObservedRunningTime="2025-10-01 15:27:10.859946427 +0000 UTC m=+1340.006789543" Oct 01 15:27:14 crc kubenswrapper[4869]: I1001 15:27:14.179289 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-l5n65" Oct 01 15:27:14 crc 
kubenswrapper[4869]: I1001 15:27:14.180027 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-l5n65" Oct 01 15:27:14 crc kubenswrapper[4869]: I1001 15:27:14.232754 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-l5n65" Oct 01 15:27:22 crc kubenswrapper[4869]: I1001 15:27:22.851742 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-hb26q"] Oct 01 15:27:22 crc kubenswrapper[4869]: I1001 15:27:22.854610 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-hb26q" Oct 01 15:27:22 crc kubenswrapper[4869]: I1001 15:27:22.860185 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-hb26q"] Oct 01 15:27:22 crc kubenswrapper[4869]: I1001 15:27:22.946073 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/90506aa0-758d-473b-bf87-2009e78731ac-utilities\") pod \"community-operators-hb26q\" (UID: \"90506aa0-758d-473b-bf87-2009e78731ac\") " pod="openshift-marketplace/community-operators-hb26q" Oct 01 15:27:22 crc kubenswrapper[4869]: I1001 15:27:22.946525 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/90506aa0-758d-473b-bf87-2009e78731ac-catalog-content\") pod \"community-operators-hb26q\" (UID: \"90506aa0-758d-473b-bf87-2009e78731ac\") " pod="openshift-marketplace/community-operators-hb26q" Oct 01 15:27:22 crc kubenswrapper[4869]: I1001 15:27:22.946557 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pxrws\" (UniqueName: \"kubernetes.io/projected/90506aa0-758d-473b-bf87-2009e78731ac-kube-api-access-pxrws\") pod \"community-operators-hb26q\" (UID: \"90506aa0-758d-473b-bf87-2009e78731ac\") " pod="openshift-marketplace/community-operators-hb26q" Oct 01 15:27:23 crc kubenswrapper[4869]: I1001 15:27:23.048153 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/90506aa0-758d-473b-bf87-2009e78731ac-utilities\") pod \"community-operators-hb26q\" (UID: \"90506aa0-758d-473b-bf87-2009e78731ac\") " pod="openshift-marketplace/community-operators-hb26q" Oct 01 15:27:23 crc kubenswrapper[4869]: I1001 15:27:23.048278 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/90506aa0-758d-473b-bf87-2009e78731ac-catalog-content\") pod \"community-operators-hb26q\" (UID: \"90506aa0-758d-473b-bf87-2009e78731ac\") " pod="openshift-marketplace/community-operators-hb26q" Oct 01 15:27:23 crc kubenswrapper[4869]: I1001 15:27:23.048311 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pxrws\" (UniqueName: \"kubernetes.io/projected/90506aa0-758d-473b-bf87-2009e78731ac-kube-api-access-pxrws\") pod \"community-operators-hb26q\" (UID: \"90506aa0-758d-473b-bf87-2009e78731ac\") " pod="openshift-marketplace/community-operators-hb26q" Oct 01 15:27:23 crc kubenswrapper[4869]: I1001 15:27:23.048662 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/90506aa0-758d-473b-bf87-2009e78731ac-utilities\") pod \"community-operators-hb26q\" (UID: \"90506aa0-758d-473b-bf87-2009e78731ac\") " pod="openshift-marketplace/community-operators-hb26q" Oct 01 15:27:23 crc kubenswrapper[4869]: I1001 15:27:23.048774 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/90506aa0-758d-473b-bf87-2009e78731ac-catalog-content\") pod \"community-operators-hb26q\" (UID: \"90506aa0-758d-473b-bf87-2009e78731ac\") " pod="openshift-marketplace/community-operators-hb26q" Oct 01 15:27:23 crc kubenswrapper[4869]: I1001 15:27:23.075330 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pxrws\" (UniqueName: \"kubernetes.io/projected/90506aa0-758d-473b-bf87-2009e78731ac-kube-api-access-pxrws\") pod \"community-operators-hb26q\" (UID: \"90506aa0-758d-473b-bf87-2009e78731ac\") " pod="openshift-marketplace/community-operators-hb26q" Oct 01 15:27:23 crc kubenswrapper[4869]: I1001 15:27:23.226020 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-hb26q" Oct 01 15:27:23 crc kubenswrapper[4869]: I1001 15:27:23.735940 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-hb26q"] Oct 01 15:27:24 crc kubenswrapper[4869]: I1001 15:27:24.009668 4869 generic.go:334] "Generic (PLEG): container finished" podID="90506aa0-758d-473b-bf87-2009e78731ac" containerID="5a4ebc0f88c2223272fdc93f7949163bbfd61ce5d585255467d76107efa7bb0f" exitCode=0 Oct 01 15:27:24 crc kubenswrapper[4869]: I1001 15:27:24.009773 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hb26q" event={"ID":"90506aa0-758d-473b-bf87-2009e78731ac","Type":"ContainerDied","Data":"5a4ebc0f88c2223272fdc93f7949163bbfd61ce5d585255467d76107efa7bb0f"} Oct 01 15:27:24 crc kubenswrapper[4869]: I1001 15:27:24.009921 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hb26q" event={"ID":"90506aa0-758d-473b-bf87-2009e78731ac","Type":"ContainerStarted","Data":"ede17b3e1c953e298764c52fcf96396510ffc1e4e4d385453fd8624467d53f88"} Oct 01 15:27:24 crc kubenswrapper[4869]: I1001 15:27:24.246041 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-l5n65" Oct 01 15:27:26 crc kubenswrapper[4869]: I1001 15:27:26.629026 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-l5n65"] Oct 01 15:27:26 crc kubenswrapper[4869]: I1001 15:27:26.630149 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-l5n65" podUID="66bb0c20-eda1-444e-9dee-61391475b50e" containerName="registry-server" containerID="cri-o://8f0146cdc9671c33409cb1c61cce7d621cfdebcacc05c0ef0318f8da6a151bfd" gracePeriod=2 Oct 01 15:27:27 crc kubenswrapper[4869]: I1001 15:27:27.048790 4869 generic.go:334] "Generic (PLEG): container finished" podID="90506aa0-758d-473b-bf87-2009e78731ac" containerID="3366dc2053bde56fcb85d42c372595a513324ad87fa81ae9472975646d5111be" exitCode=0 Oct 01 15:27:27 crc kubenswrapper[4869]: I1001 15:27:27.048857 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hb26q" 
event={"ID":"90506aa0-758d-473b-bf87-2009e78731ac","Type":"ContainerDied","Data":"3366dc2053bde56fcb85d42c372595a513324ad87fa81ae9472975646d5111be"} Oct 01 15:27:27 crc kubenswrapper[4869]: E1001 15:27:27.367478 4869 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod66bb0c20_eda1_444e_9dee_61391475b50e.slice/crio-conmon-8f0146cdc9671c33409cb1c61cce7d621cfdebcacc05c0ef0318f8da6a151bfd.scope\": RecentStats: unable to find data in memory cache]" Oct 01 15:27:28 crc kubenswrapper[4869]: I1001 15:27:28.061779 4869 generic.go:334] "Generic (PLEG): container finished" podID="66bb0c20-eda1-444e-9dee-61391475b50e" containerID="8f0146cdc9671c33409cb1c61cce7d621cfdebcacc05c0ef0318f8da6a151bfd" exitCode=0 Oct 01 15:27:28 crc kubenswrapper[4869]: I1001 15:27:28.062449 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-l5n65" event={"ID":"66bb0c20-eda1-444e-9dee-61391475b50e","Type":"ContainerDied","Data":"8f0146cdc9671c33409cb1c61cce7d621cfdebcacc05c0ef0318f8da6a151bfd"} Oct 01 15:27:28 crc kubenswrapper[4869]: I1001 15:27:28.066724 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hb26q" event={"ID":"90506aa0-758d-473b-bf87-2009e78731ac","Type":"ContainerStarted","Data":"901942f3fb60f5f0f8ed3f8d5d23565603e3008b356d5c8012fd5f5d51b33b08"} Oct 01 15:27:28 crc kubenswrapper[4869]: I1001 15:27:28.091721 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-hb26q" podStartSLOduration=3.668170939 podStartE2EDuration="6.091702987s" podCreationTimestamp="2025-10-01 15:27:22 +0000 UTC" firstStartedPulling="2025-10-01 15:27:25.019519102 +0000 UTC m=+1354.166362218" lastFinishedPulling="2025-10-01 15:27:27.44305112 +0000 UTC m=+1356.589894266" observedRunningTime="2025-10-01 15:27:28.086185029 +0000 UTC m=+1357.233028185" watchObservedRunningTime="2025-10-01 15:27:28.091702987 +0000 UTC m=+1357.238546113" Oct 01 15:27:28 crc kubenswrapper[4869]: I1001 15:27:28.199090 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-l5n65" Oct 01 15:27:28 crc kubenswrapper[4869]: I1001 15:27:28.248647 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/66bb0c20-eda1-444e-9dee-61391475b50e-utilities\") pod \"66bb0c20-eda1-444e-9dee-61391475b50e\" (UID: \"66bb0c20-eda1-444e-9dee-61391475b50e\") " Oct 01 15:27:28 crc kubenswrapper[4869]: I1001 15:27:28.248864 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vbc99\" (UniqueName: \"kubernetes.io/projected/66bb0c20-eda1-444e-9dee-61391475b50e-kube-api-access-vbc99\") pod \"66bb0c20-eda1-444e-9dee-61391475b50e\" (UID: \"66bb0c20-eda1-444e-9dee-61391475b50e\") " Oct 01 15:27:28 crc kubenswrapper[4869]: I1001 15:27:28.249141 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/66bb0c20-eda1-444e-9dee-61391475b50e-catalog-content\") pod \"66bb0c20-eda1-444e-9dee-61391475b50e\" (UID: \"66bb0c20-eda1-444e-9dee-61391475b50e\") " Oct 01 15:27:28 crc kubenswrapper[4869]: I1001 15:27:28.249658 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/66bb0c20-eda1-444e-9dee-61391475b50e-utilities" (OuterVolumeSpecName: "utilities") pod "66bb0c20-eda1-444e-9dee-61391475b50e" (UID: "66bb0c20-eda1-444e-9dee-61391475b50e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 15:27:28 crc kubenswrapper[4869]: I1001 15:27:28.253886 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/66bb0c20-eda1-444e-9dee-61391475b50e-kube-api-access-vbc99" (OuterVolumeSpecName: "kube-api-access-vbc99") pod "66bb0c20-eda1-444e-9dee-61391475b50e" (UID: "66bb0c20-eda1-444e-9dee-61391475b50e"). InnerVolumeSpecName "kube-api-access-vbc99". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:27:28 crc kubenswrapper[4869]: I1001 15:27:28.266374 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/66bb0c20-eda1-444e-9dee-61391475b50e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "66bb0c20-eda1-444e-9dee-61391475b50e" (UID: "66bb0c20-eda1-444e-9dee-61391475b50e"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 15:27:28 crc kubenswrapper[4869]: I1001 15:27:28.351518 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vbc99\" (UniqueName: \"kubernetes.io/projected/66bb0c20-eda1-444e-9dee-61391475b50e-kube-api-access-vbc99\") on node \"crc\" DevicePath \"\"" Oct 01 15:27:28 crc kubenswrapper[4869]: I1001 15:27:28.351574 4869 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/66bb0c20-eda1-444e-9dee-61391475b50e-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 15:27:28 crc kubenswrapper[4869]: I1001 15:27:28.351587 4869 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/66bb0c20-eda1-444e-9dee-61391475b50e-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 15:27:29 crc kubenswrapper[4869]: I1001 15:27:29.025714 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-jl5mj"] Oct 01 15:27:29 crc kubenswrapper[4869]: E1001 15:27:29.026339 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66bb0c20-eda1-444e-9dee-61391475b50e" containerName="registry-server" Oct 01 15:27:29 crc kubenswrapper[4869]: I1001 15:27:29.026353 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="66bb0c20-eda1-444e-9dee-61391475b50e" containerName="registry-server" Oct 01 15:27:29 crc kubenswrapper[4869]: E1001 15:27:29.026386 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66bb0c20-eda1-444e-9dee-61391475b50e" containerName="extract-utilities" Oct 01 15:27:29 crc kubenswrapper[4869]: I1001 15:27:29.026392 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="66bb0c20-eda1-444e-9dee-61391475b50e" containerName="extract-utilities" Oct 01 15:27:29 crc kubenswrapper[4869]: E1001 15:27:29.026402 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66bb0c20-eda1-444e-9dee-61391475b50e" containerName="extract-content" Oct 01 15:27:29 crc kubenswrapper[4869]: I1001 15:27:29.026408 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="66bb0c20-eda1-444e-9dee-61391475b50e" containerName="extract-content" Oct 01 15:27:29 crc kubenswrapper[4869]: I1001 15:27:29.026581 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="66bb0c20-eda1-444e-9dee-61391475b50e" containerName="registry-server" Oct 01 15:27:29 crc kubenswrapper[4869]: I1001 15:27:29.044135 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-jl5mj" Oct 01 15:27:29 crc kubenswrapper[4869]: I1001 15:27:29.066229 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-jl5mj"] Oct 01 15:27:29 crc kubenswrapper[4869]: I1001 15:27:29.084022 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6h8cg\" (UniqueName: \"kubernetes.io/projected/4a255233-e4a3-48ec-b11b-ae56d8815fe7-kube-api-access-6h8cg\") pod \"certified-operators-jl5mj\" (UID: \"4a255233-e4a3-48ec-b11b-ae56d8815fe7\") " pod="openshift-marketplace/certified-operators-jl5mj" Oct 01 15:27:29 crc kubenswrapper[4869]: I1001 15:27:29.084106 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4a255233-e4a3-48ec-b11b-ae56d8815fe7-utilities\") pod \"certified-operators-jl5mj\" (UID: \"4a255233-e4a3-48ec-b11b-ae56d8815fe7\") " pod="openshift-marketplace/certified-operators-jl5mj" Oct 01 15:27:29 crc kubenswrapper[4869]: I1001 15:27:29.084207 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4a255233-e4a3-48ec-b11b-ae56d8815fe7-catalog-content\") pod \"certified-operators-jl5mj\" (UID: \"4a255233-e4a3-48ec-b11b-ae56d8815fe7\") " pod="openshift-marketplace/certified-operators-jl5mj" Oct 01 15:27:29 crc kubenswrapper[4869]: I1001 15:27:29.098488 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-l5n65" event={"ID":"66bb0c20-eda1-444e-9dee-61391475b50e","Type":"ContainerDied","Data":"a54feae3bec48eeeda76798f990e6aebe150da796f37df8d242f11cc37ae993c"} Oct 01 15:27:29 crc kubenswrapper[4869]: I1001 15:27:29.098544 4869 scope.go:117] "RemoveContainer" containerID="8f0146cdc9671c33409cb1c61cce7d621cfdebcacc05c0ef0318f8da6a151bfd" Oct 01 15:27:29 crc kubenswrapper[4869]: I1001 15:27:29.098704 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-l5n65" Oct 01 15:27:29 crc kubenswrapper[4869]: I1001 15:27:29.144058 4869 scope.go:117] "RemoveContainer" containerID="8fe8fb58081796b2eefebd1fb0a52d5f17f5247d539c137fec5161f2093c20d1" Oct 01 15:27:29 crc kubenswrapper[4869]: I1001 15:27:29.153927 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-l5n65"] Oct 01 15:27:29 crc kubenswrapper[4869]: I1001 15:27:29.163395 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-l5n65"] Oct 01 15:27:29 crc kubenswrapper[4869]: I1001 15:27:29.176562 4869 scope.go:117] "RemoveContainer" containerID="0e350a61f551ad50e095cb83d6a767d0fce7d33df2a805e35650400de819a031" Oct 01 15:27:29 crc kubenswrapper[4869]: I1001 15:27:29.187238 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6h8cg\" (UniqueName: \"kubernetes.io/projected/4a255233-e4a3-48ec-b11b-ae56d8815fe7-kube-api-access-6h8cg\") pod \"certified-operators-jl5mj\" (UID: \"4a255233-e4a3-48ec-b11b-ae56d8815fe7\") " pod="openshift-marketplace/certified-operators-jl5mj" Oct 01 15:27:29 crc kubenswrapper[4869]: I1001 15:27:29.187314 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4a255233-e4a3-48ec-b11b-ae56d8815fe7-utilities\") pod \"certified-operators-jl5mj\" (UID: \"4a255233-e4a3-48ec-b11b-ae56d8815fe7\") " pod="openshift-marketplace/certified-operators-jl5mj" Oct 01 15:27:29 crc kubenswrapper[4869]: I1001 15:27:29.187379 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4a255233-e4a3-48ec-b11b-ae56d8815fe7-catalog-content\") pod \"certified-operators-jl5mj\" (UID: \"4a255233-e4a3-48ec-b11b-ae56d8815fe7\") " pod="openshift-marketplace/certified-operators-jl5mj" Oct 01 15:27:29 crc kubenswrapper[4869]: I1001 15:27:29.187864 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4a255233-e4a3-48ec-b11b-ae56d8815fe7-catalog-content\") pod \"certified-operators-jl5mj\" (UID: \"4a255233-e4a3-48ec-b11b-ae56d8815fe7\") " pod="openshift-marketplace/certified-operators-jl5mj" Oct 01 15:27:29 crc kubenswrapper[4869]: I1001 15:27:29.187903 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4a255233-e4a3-48ec-b11b-ae56d8815fe7-utilities\") pod \"certified-operators-jl5mj\" (UID: \"4a255233-e4a3-48ec-b11b-ae56d8815fe7\") " pod="openshift-marketplace/certified-operators-jl5mj" Oct 01 15:27:29 crc kubenswrapper[4869]: I1001 15:27:29.211609 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6h8cg\" (UniqueName: \"kubernetes.io/projected/4a255233-e4a3-48ec-b11b-ae56d8815fe7-kube-api-access-6h8cg\") pod \"certified-operators-jl5mj\" (UID: \"4a255233-e4a3-48ec-b11b-ae56d8815fe7\") " pod="openshift-marketplace/certified-operators-jl5mj" Oct 01 15:27:29 crc kubenswrapper[4869]: I1001 15:27:29.385385 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-jl5mj" Oct 01 15:27:29 crc kubenswrapper[4869]: I1001 15:27:29.615636 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="66bb0c20-eda1-444e-9dee-61391475b50e" path="/var/lib/kubelet/pods/66bb0c20-eda1-444e-9dee-61391475b50e/volumes" Oct 01 15:27:29 crc kubenswrapper[4869]: I1001 15:27:29.901222 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-jl5mj"] Oct 01 15:27:30 crc kubenswrapper[4869]: I1001 15:27:30.106996 4869 generic.go:334] "Generic (PLEG): container finished" podID="4a255233-e4a3-48ec-b11b-ae56d8815fe7" containerID="c86c2427bb649ee20fed3a908618e1c750f0298b5329e6d5829b5fa5198ba18c" exitCode=0 Oct 01 15:27:30 crc kubenswrapper[4869]: I1001 15:27:30.107076 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jl5mj" event={"ID":"4a255233-e4a3-48ec-b11b-ae56d8815fe7","Type":"ContainerDied","Data":"c86c2427bb649ee20fed3a908618e1c750f0298b5329e6d5829b5fa5198ba18c"} Oct 01 15:27:30 crc kubenswrapper[4869]: I1001 15:27:30.107110 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jl5mj" event={"ID":"4a255233-e4a3-48ec-b11b-ae56d8815fe7","Type":"ContainerStarted","Data":"6f66af390e450155e58e4c7bcb76a6391ec90da17126cbfa0460b1b5ef2360f4"} Oct 01 15:27:32 crc kubenswrapper[4869]: I1001 15:27:32.141388 4869 generic.go:334] "Generic (PLEG): container finished" podID="4a255233-e4a3-48ec-b11b-ae56d8815fe7" containerID="d7a8a6b47349f97f2c2279b5c95cde32afdac2784ed7b9ccfd594b28071ea7ba" exitCode=0 Oct 01 15:27:32 crc kubenswrapper[4869]: I1001 15:27:32.141968 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jl5mj" event={"ID":"4a255233-e4a3-48ec-b11b-ae56d8815fe7","Type":"ContainerDied","Data":"d7a8a6b47349f97f2c2279b5c95cde32afdac2784ed7b9ccfd594b28071ea7ba"} Oct 01 15:27:33 crc kubenswrapper[4869]: I1001 15:27:33.154831 4869 generic.go:334] "Generic (PLEG): container finished" podID="12855739-df17-48b6-886b-469e6a39d7f8" containerID="0644d998c969fb3bed012dc7a8f2ad9a8e87fcc549ee146354f20323a948af9f" exitCode=0 Oct 01 15:27:33 crc kubenswrapper[4869]: I1001 15:27:33.154967 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-f8mtp" event={"ID":"12855739-df17-48b6-886b-469e6a39d7f8","Type":"ContainerDied","Data":"0644d998c969fb3bed012dc7a8f2ad9a8e87fcc549ee146354f20323a948af9f"} Oct 01 15:27:33 crc kubenswrapper[4869]: I1001 15:27:33.160493 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jl5mj" event={"ID":"4a255233-e4a3-48ec-b11b-ae56d8815fe7","Type":"ContainerStarted","Data":"071357a22860f182dfe549707b9223a8b8a2e03fa46ce91c75e3ddfd13edfb0a"} Oct 01 15:27:33 crc kubenswrapper[4869]: I1001 15:27:33.211123 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-jl5mj" podStartSLOduration=1.574641147 podStartE2EDuration="4.211101306s" podCreationTimestamp="2025-10-01 15:27:29 +0000 UTC" firstStartedPulling="2025-10-01 15:27:30.108456816 +0000 UTC m=+1359.255299932" lastFinishedPulling="2025-10-01 15:27:32.744916935 +0000 UTC m=+1361.891760091" observedRunningTime="2025-10-01 15:27:33.198399547 +0000 UTC m=+1362.345242673" watchObservedRunningTime="2025-10-01 15:27:33.211101306 +0000 UTC m=+1362.357944422" Oct 01 15:27:33 crc 
kubenswrapper[4869]: I1001 15:27:33.226702 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-hb26q" Oct 01 15:27:33 crc kubenswrapper[4869]: I1001 15:27:33.226775 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-hb26q" Oct 01 15:27:33 crc kubenswrapper[4869]: I1001 15:27:33.291321 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-hb26q" Oct 01 15:27:34 crc kubenswrapper[4869]: I1001 15:27:34.242744 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-hb26q" Oct 01 15:27:34 crc kubenswrapper[4869]: I1001 15:27:34.610412 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-f8mtp" Oct 01 15:27:34 crc kubenswrapper[4869]: I1001 15:27:34.701279 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/12855739-df17-48b6-886b-469e6a39d7f8-ssh-key\") pod \"12855739-df17-48b6-886b-469e6a39d7f8\" (UID: \"12855739-df17-48b6-886b-469e6a39d7f8\") " Oct 01 15:27:34 crc kubenswrapper[4869]: I1001 15:27:34.701324 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/12855739-df17-48b6-886b-469e6a39d7f8-inventory\") pod \"12855739-df17-48b6-886b-469e6a39d7f8\" (UID: \"12855739-df17-48b6-886b-469e6a39d7f8\") " Oct 01 15:27:34 crc kubenswrapper[4869]: I1001 15:27:34.701368 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6tczd\" (UniqueName: \"kubernetes.io/projected/12855739-df17-48b6-886b-469e6a39d7f8-kube-api-access-6tczd\") pod \"12855739-df17-48b6-886b-469e6a39d7f8\" (UID: \"12855739-df17-48b6-886b-469e6a39d7f8\") " Oct 01 15:27:34 crc kubenswrapper[4869]: I1001 15:27:34.701506 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/12855739-df17-48b6-886b-469e6a39d7f8-bootstrap-combined-ca-bundle\") pod \"12855739-df17-48b6-886b-469e6a39d7f8\" (UID: \"12855739-df17-48b6-886b-469e6a39d7f8\") " Oct 01 15:27:34 crc kubenswrapper[4869]: I1001 15:27:34.707163 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/12855739-df17-48b6-886b-469e6a39d7f8-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "12855739-df17-48b6-886b-469e6a39d7f8" (UID: "12855739-df17-48b6-886b-469e6a39d7f8"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:27:34 crc kubenswrapper[4869]: I1001 15:27:34.707712 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/12855739-df17-48b6-886b-469e6a39d7f8-kube-api-access-6tczd" (OuterVolumeSpecName: "kube-api-access-6tczd") pod "12855739-df17-48b6-886b-469e6a39d7f8" (UID: "12855739-df17-48b6-886b-469e6a39d7f8"). InnerVolumeSpecName "kube-api-access-6tczd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:27:34 crc kubenswrapper[4869]: I1001 15:27:34.728087 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/12855739-df17-48b6-886b-469e6a39d7f8-inventory" (OuterVolumeSpecName: "inventory") pod "12855739-df17-48b6-886b-469e6a39d7f8" (UID: "12855739-df17-48b6-886b-469e6a39d7f8"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:27:34 crc kubenswrapper[4869]: I1001 15:27:34.741807 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/12855739-df17-48b6-886b-469e6a39d7f8-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "12855739-df17-48b6-886b-469e6a39d7f8" (UID: "12855739-df17-48b6-886b-469e6a39d7f8"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:27:34 crc kubenswrapper[4869]: I1001 15:27:34.806203 4869 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/12855739-df17-48b6-886b-469e6a39d7f8-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 15:27:34 crc kubenswrapper[4869]: I1001 15:27:34.806247 4869 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/12855739-df17-48b6-886b-469e6a39d7f8-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 01 15:27:34 crc kubenswrapper[4869]: I1001 15:27:34.806279 4869 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/12855739-df17-48b6-886b-469e6a39d7f8-inventory\") on node \"crc\" DevicePath \"\"" Oct 01 15:27:34 crc kubenswrapper[4869]: I1001 15:27:34.806291 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6tczd\" (UniqueName: \"kubernetes.io/projected/12855739-df17-48b6-886b-469e6a39d7f8-kube-api-access-6tczd\") on node \"crc\" DevicePath \"\"" Oct 01 15:27:35 crc kubenswrapper[4869]: I1001 15:27:35.186320 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-f8mtp" event={"ID":"12855739-df17-48b6-886b-469e6a39d7f8","Type":"ContainerDied","Data":"e75712c46fee0cb6e95a4a4784dba49b33fc25401574c7c90d2a587d167e710a"} Oct 01 15:27:35 crc kubenswrapper[4869]: I1001 15:27:35.186401 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e75712c46fee0cb6e95a4a4784dba49b33fc25401574c7c90d2a587d167e710a" Oct 01 15:27:35 crc kubenswrapper[4869]: I1001 15:27:35.186345 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-f8mtp" Oct 01 15:27:35 crc kubenswrapper[4869]: I1001 15:27:35.296401 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-ph2hh"] Oct 01 15:27:35 crc kubenswrapper[4869]: E1001 15:27:35.296916 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="12855739-df17-48b6-886b-469e6a39d7f8" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Oct 01 15:27:35 crc kubenswrapper[4869]: I1001 15:27:35.296935 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="12855739-df17-48b6-886b-469e6a39d7f8" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Oct 01 15:27:35 crc kubenswrapper[4869]: I1001 15:27:35.297111 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="12855739-df17-48b6-886b-469e6a39d7f8" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Oct 01 15:27:35 crc kubenswrapper[4869]: I1001 15:27:35.297797 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-ph2hh" Oct 01 15:27:35 crc kubenswrapper[4869]: I1001 15:27:35.300165 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 01 15:27:35 crc kubenswrapper[4869]: I1001 15:27:35.300174 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-cjg8g" Oct 01 15:27:35 crc kubenswrapper[4869]: I1001 15:27:35.300764 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 01 15:27:35 crc kubenswrapper[4869]: I1001 15:27:35.300795 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 01 15:27:35 crc kubenswrapper[4869]: I1001 15:27:35.306611 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-ph2hh"] Oct 01 15:27:35 crc kubenswrapper[4869]: I1001 15:27:35.418861 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/23e89dd5-daa6-4174-90e3-a9a3a84dde66-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-ph2hh\" (UID: \"23e89dd5-daa6-4174-90e3-a9a3a84dde66\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-ph2hh" Oct 01 15:27:35 crc kubenswrapper[4869]: I1001 15:27:35.419079 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/23e89dd5-daa6-4174-90e3-a9a3a84dde66-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-ph2hh\" (UID: \"23e89dd5-daa6-4174-90e3-a9a3a84dde66\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-ph2hh" Oct 01 15:27:35 crc kubenswrapper[4869]: I1001 15:27:35.419373 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vl258\" (UniqueName: \"kubernetes.io/projected/23e89dd5-daa6-4174-90e3-a9a3a84dde66-kube-api-access-vl258\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-ph2hh\" (UID: \"23e89dd5-daa6-4174-90e3-a9a3a84dde66\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-ph2hh" Oct 01 15:27:35 crc kubenswrapper[4869]: I1001 
15:27:35.521078 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/23e89dd5-daa6-4174-90e3-a9a3a84dde66-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-ph2hh\" (UID: \"23e89dd5-daa6-4174-90e3-a9a3a84dde66\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-ph2hh" Oct 01 15:27:35 crc kubenswrapper[4869]: I1001 15:27:35.521482 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vl258\" (UniqueName: \"kubernetes.io/projected/23e89dd5-daa6-4174-90e3-a9a3a84dde66-kube-api-access-vl258\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-ph2hh\" (UID: \"23e89dd5-daa6-4174-90e3-a9a3a84dde66\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-ph2hh" Oct 01 15:27:35 crc kubenswrapper[4869]: I1001 15:27:35.521618 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/23e89dd5-daa6-4174-90e3-a9a3a84dde66-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-ph2hh\" (UID: \"23e89dd5-daa6-4174-90e3-a9a3a84dde66\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-ph2hh" Oct 01 15:27:35 crc kubenswrapper[4869]: I1001 15:27:35.526591 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/23e89dd5-daa6-4174-90e3-a9a3a84dde66-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-ph2hh\" (UID: \"23e89dd5-daa6-4174-90e3-a9a3a84dde66\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-ph2hh" Oct 01 15:27:35 crc kubenswrapper[4869]: I1001 15:27:35.526832 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/23e89dd5-daa6-4174-90e3-a9a3a84dde66-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-ph2hh\" (UID: \"23e89dd5-daa6-4174-90e3-a9a3a84dde66\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-ph2hh" Oct 01 15:27:35 crc kubenswrapper[4869]: I1001 15:27:35.548478 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vl258\" (UniqueName: \"kubernetes.io/projected/23e89dd5-daa6-4174-90e3-a9a3a84dde66-kube-api-access-vl258\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-ph2hh\" (UID: \"23e89dd5-daa6-4174-90e3-a9a3a84dde66\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-ph2hh" Oct 01 15:27:35 crc kubenswrapper[4869]: I1001 15:27:35.621793 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-hb26q"] Oct 01 15:27:35 crc kubenswrapper[4869]: I1001 15:27:35.629241 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-ph2hh" Oct 01 15:27:36 crc kubenswrapper[4869]: I1001 15:27:36.196463 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-ph2hh"] Oct 01 15:27:36 crc kubenswrapper[4869]: W1001 15:27:36.207283 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod23e89dd5_daa6_4174_90e3_a9a3a84dde66.slice/crio-df10c4a089b9e27111fdb98decc02a3a60b74d863ec43a542430a3a03ac93fb2 WatchSource:0}: Error finding container df10c4a089b9e27111fdb98decc02a3a60b74d863ec43a542430a3a03ac93fb2: Status 404 returned error can't find the container with id df10c4a089b9e27111fdb98decc02a3a60b74d863ec43a542430a3a03ac93fb2 Oct 01 15:27:37 crc kubenswrapper[4869]: I1001 15:27:37.203423 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-hb26q" podUID="90506aa0-758d-473b-bf87-2009e78731ac" containerName="registry-server" containerID="cri-o://901942f3fb60f5f0f8ed3f8d5d23565603e3008b356d5c8012fd5f5d51b33b08" gracePeriod=2 Oct 01 15:27:37 crc kubenswrapper[4869]: I1001 15:27:37.203565 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-ph2hh" event={"ID":"23e89dd5-daa6-4174-90e3-a9a3a84dde66","Type":"ContainerStarted","Data":"7cf48ea29ff8f9ec58969e4312cb5b3e8ce8120ddd5c2843d36031b5c1942727"} Oct 01 15:27:37 crc kubenswrapper[4869]: I1001 15:27:37.204070 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-ph2hh" event={"ID":"23e89dd5-daa6-4174-90e3-a9a3a84dde66","Type":"ContainerStarted","Data":"df10c4a089b9e27111fdb98decc02a3a60b74d863ec43a542430a3a03ac93fb2"} Oct 01 15:27:37 crc kubenswrapper[4869]: I1001 15:27:37.236564 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-ph2hh" podStartSLOduration=1.668228004 podStartE2EDuration="2.236542782s" podCreationTimestamp="2025-10-01 15:27:35 +0000 UTC" firstStartedPulling="2025-10-01 15:27:36.210811422 +0000 UTC m=+1365.357654538" lastFinishedPulling="2025-10-01 15:27:36.77912618 +0000 UTC m=+1365.925969316" observedRunningTime="2025-10-01 15:27:37.233868525 +0000 UTC m=+1366.380711651" watchObservedRunningTime="2025-10-01 15:27:37.236542782 +0000 UTC m=+1366.383385898" Oct 01 15:27:37 crc kubenswrapper[4869]: I1001 15:27:37.712602 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-hb26q" Oct 01 15:27:37 crc kubenswrapper[4869]: I1001 15:27:37.765779 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/90506aa0-758d-473b-bf87-2009e78731ac-utilities\") pod \"90506aa0-758d-473b-bf87-2009e78731ac\" (UID: \"90506aa0-758d-473b-bf87-2009e78731ac\") " Oct 01 15:27:37 crc kubenswrapper[4869]: I1001 15:27:37.765826 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pxrws\" (UniqueName: \"kubernetes.io/projected/90506aa0-758d-473b-bf87-2009e78731ac-kube-api-access-pxrws\") pod \"90506aa0-758d-473b-bf87-2009e78731ac\" (UID: \"90506aa0-758d-473b-bf87-2009e78731ac\") " Oct 01 15:27:37 crc kubenswrapper[4869]: I1001 15:27:37.765852 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/90506aa0-758d-473b-bf87-2009e78731ac-catalog-content\") pod \"90506aa0-758d-473b-bf87-2009e78731ac\" (UID: \"90506aa0-758d-473b-bf87-2009e78731ac\") " Oct 01 15:27:37 crc kubenswrapper[4869]: I1001 15:27:37.779513 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/90506aa0-758d-473b-bf87-2009e78731ac-utilities" (OuterVolumeSpecName: "utilities") pod "90506aa0-758d-473b-bf87-2009e78731ac" (UID: "90506aa0-758d-473b-bf87-2009e78731ac"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 15:27:37 crc kubenswrapper[4869]: I1001 15:27:37.783559 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/90506aa0-758d-473b-bf87-2009e78731ac-kube-api-access-pxrws" (OuterVolumeSpecName: "kube-api-access-pxrws") pod "90506aa0-758d-473b-bf87-2009e78731ac" (UID: "90506aa0-758d-473b-bf87-2009e78731ac"). InnerVolumeSpecName "kube-api-access-pxrws". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:27:37 crc kubenswrapper[4869]: I1001 15:27:37.813868 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/90506aa0-758d-473b-bf87-2009e78731ac-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "90506aa0-758d-473b-bf87-2009e78731ac" (UID: "90506aa0-758d-473b-bf87-2009e78731ac"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 15:27:37 crc kubenswrapper[4869]: I1001 15:27:37.867744 4869 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/90506aa0-758d-473b-bf87-2009e78731ac-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 15:27:37 crc kubenswrapper[4869]: I1001 15:27:37.867775 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pxrws\" (UniqueName: \"kubernetes.io/projected/90506aa0-758d-473b-bf87-2009e78731ac-kube-api-access-pxrws\") on node \"crc\" DevicePath \"\"" Oct 01 15:27:37 crc kubenswrapper[4869]: I1001 15:27:37.867786 4869 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/90506aa0-758d-473b-bf87-2009e78731ac-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 15:27:38 crc kubenswrapper[4869]: I1001 15:27:38.220480 4869 generic.go:334] "Generic (PLEG): container finished" podID="90506aa0-758d-473b-bf87-2009e78731ac" containerID="901942f3fb60f5f0f8ed3f8d5d23565603e3008b356d5c8012fd5f5d51b33b08" exitCode=0 Oct 01 15:27:38 crc kubenswrapper[4869]: I1001 15:27:38.220586 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-hb26q" Oct 01 15:27:38 crc kubenswrapper[4869]: I1001 15:27:38.220583 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hb26q" event={"ID":"90506aa0-758d-473b-bf87-2009e78731ac","Type":"ContainerDied","Data":"901942f3fb60f5f0f8ed3f8d5d23565603e3008b356d5c8012fd5f5d51b33b08"} Oct 01 15:27:38 crc kubenswrapper[4869]: I1001 15:27:38.220742 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hb26q" event={"ID":"90506aa0-758d-473b-bf87-2009e78731ac","Type":"ContainerDied","Data":"ede17b3e1c953e298764c52fcf96396510ffc1e4e4d385453fd8624467d53f88"} Oct 01 15:27:38 crc kubenswrapper[4869]: I1001 15:27:38.220786 4869 scope.go:117] "RemoveContainer" containerID="901942f3fb60f5f0f8ed3f8d5d23565603e3008b356d5c8012fd5f5d51b33b08" Oct 01 15:27:38 crc kubenswrapper[4869]: I1001 15:27:38.254068 4869 scope.go:117] "RemoveContainer" containerID="3366dc2053bde56fcb85d42c372595a513324ad87fa81ae9472975646d5111be" Oct 01 15:27:38 crc kubenswrapper[4869]: I1001 15:27:38.283514 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-hb26q"] Oct 01 15:27:38 crc kubenswrapper[4869]: I1001 15:27:38.298407 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-hb26q"] Oct 01 15:27:38 crc kubenswrapper[4869]: I1001 15:27:38.322392 4869 scope.go:117] "RemoveContainer" containerID="5a4ebc0f88c2223272fdc93f7949163bbfd61ce5d585255467d76107efa7bb0f" Oct 01 15:27:38 crc kubenswrapper[4869]: I1001 15:27:38.349385 4869 scope.go:117] "RemoveContainer" containerID="901942f3fb60f5f0f8ed3f8d5d23565603e3008b356d5c8012fd5f5d51b33b08" Oct 01 15:27:38 crc kubenswrapper[4869]: E1001 15:27:38.350191 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"901942f3fb60f5f0f8ed3f8d5d23565603e3008b356d5c8012fd5f5d51b33b08\": container with ID starting with 901942f3fb60f5f0f8ed3f8d5d23565603e3008b356d5c8012fd5f5d51b33b08 not found: ID does not exist" containerID="901942f3fb60f5f0f8ed3f8d5d23565603e3008b356d5c8012fd5f5d51b33b08" Oct 01 15:27:38 crc kubenswrapper[4869]: I1001 15:27:38.350244 
4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"901942f3fb60f5f0f8ed3f8d5d23565603e3008b356d5c8012fd5f5d51b33b08"} err="failed to get container status \"901942f3fb60f5f0f8ed3f8d5d23565603e3008b356d5c8012fd5f5d51b33b08\": rpc error: code = NotFound desc = could not find container \"901942f3fb60f5f0f8ed3f8d5d23565603e3008b356d5c8012fd5f5d51b33b08\": container with ID starting with 901942f3fb60f5f0f8ed3f8d5d23565603e3008b356d5c8012fd5f5d51b33b08 not found: ID does not exist" Oct 01 15:27:38 crc kubenswrapper[4869]: I1001 15:27:38.350290 4869 scope.go:117] "RemoveContainer" containerID="3366dc2053bde56fcb85d42c372595a513324ad87fa81ae9472975646d5111be" Oct 01 15:27:38 crc kubenswrapper[4869]: E1001 15:27:38.351568 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3366dc2053bde56fcb85d42c372595a513324ad87fa81ae9472975646d5111be\": container with ID starting with 3366dc2053bde56fcb85d42c372595a513324ad87fa81ae9472975646d5111be not found: ID does not exist" containerID="3366dc2053bde56fcb85d42c372595a513324ad87fa81ae9472975646d5111be" Oct 01 15:27:38 crc kubenswrapper[4869]: I1001 15:27:38.351637 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3366dc2053bde56fcb85d42c372595a513324ad87fa81ae9472975646d5111be"} err="failed to get container status \"3366dc2053bde56fcb85d42c372595a513324ad87fa81ae9472975646d5111be\": rpc error: code = NotFound desc = could not find container \"3366dc2053bde56fcb85d42c372595a513324ad87fa81ae9472975646d5111be\": container with ID starting with 3366dc2053bde56fcb85d42c372595a513324ad87fa81ae9472975646d5111be not found: ID does not exist" Oct 01 15:27:38 crc kubenswrapper[4869]: I1001 15:27:38.351693 4869 scope.go:117] "RemoveContainer" containerID="5a4ebc0f88c2223272fdc93f7949163bbfd61ce5d585255467d76107efa7bb0f" Oct 01 15:27:38 crc kubenswrapper[4869]: E1001 15:27:38.352135 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5a4ebc0f88c2223272fdc93f7949163bbfd61ce5d585255467d76107efa7bb0f\": container with ID starting with 5a4ebc0f88c2223272fdc93f7949163bbfd61ce5d585255467d76107efa7bb0f not found: ID does not exist" containerID="5a4ebc0f88c2223272fdc93f7949163bbfd61ce5d585255467d76107efa7bb0f" Oct 01 15:27:38 crc kubenswrapper[4869]: I1001 15:27:38.352182 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5a4ebc0f88c2223272fdc93f7949163bbfd61ce5d585255467d76107efa7bb0f"} err="failed to get container status \"5a4ebc0f88c2223272fdc93f7949163bbfd61ce5d585255467d76107efa7bb0f\": rpc error: code = NotFound desc = could not find container \"5a4ebc0f88c2223272fdc93f7949163bbfd61ce5d585255467d76107efa7bb0f\": container with ID starting with 5a4ebc0f88c2223272fdc93f7949163bbfd61ce5d585255467d76107efa7bb0f not found: ID does not exist" Oct 01 15:27:39 crc kubenswrapper[4869]: I1001 15:27:39.386885 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-jl5mj" Oct 01 15:27:39 crc kubenswrapper[4869]: I1001 15:27:39.386931 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-jl5mj" Oct 01 15:27:39 crc kubenswrapper[4869]: I1001 15:27:39.450596 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openshift-marketplace/certified-operators-jl5mj" Oct 01 15:27:39 crc kubenswrapper[4869]: I1001 15:27:39.595841 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="90506aa0-758d-473b-bf87-2009e78731ac" path="/var/lib/kubelet/pods/90506aa0-758d-473b-bf87-2009e78731ac/volumes" Oct 01 15:27:40 crc kubenswrapper[4869]: I1001 15:27:40.292212 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-jl5mj" Oct 01 15:27:41 crc kubenswrapper[4869]: I1001 15:27:41.025942 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-jl5mj"] Oct 01 15:27:42 crc kubenswrapper[4869]: I1001 15:27:42.265179 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-jl5mj" podUID="4a255233-e4a3-48ec-b11b-ae56d8815fe7" containerName="registry-server" containerID="cri-o://071357a22860f182dfe549707b9223a8b8a2e03fa46ce91c75e3ddfd13edfb0a" gracePeriod=2 Oct 01 15:27:42 crc kubenswrapper[4869]: I1001 15:27:42.697840 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-jl5mj" Oct 01 15:27:42 crc kubenswrapper[4869]: I1001 15:27:42.871845 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4a255233-e4a3-48ec-b11b-ae56d8815fe7-utilities\") pod \"4a255233-e4a3-48ec-b11b-ae56d8815fe7\" (UID: \"4a255233-e4a3-48ec-b11b-ae56d8815fe7\") " Oct 01 15:27:42 crc kubenswrapper[4869]: I1001 15:27:42.872444 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6h8cg\" (UniqueName: \"kubernetes.io/projected/4a255233-e4a3-48ec-b11b-ae56d8815fe7-kube-api-access-6h8cg\") pod \"4a255233-e4a3-48ec-b11b-ae56d8815fe7\" (UID: \"4a255233-e4a3-48ec-b11b-ae56d8815fe7\") " Oct 01 15:27:42 crc kubenswrapper[4869]: I1001 15:27:42.872500 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4a255233-e4a3-48ec-b11b-ae56d8815fe7-catalog-content\") pod \"4a255233-e4a3-48ec-b11b-ae56d8815fe7\" (UID: \"4a255233-e4a3-48ec-b11b-ae56d8815fe7\") " Oct 01 15:27:42 crc kubenswrapper[4869]: I1001 15:27:42.872907 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4a255233-e4a3-48ec-b11b-ae56d8815fe7-utilities" (OuterVolumeSpecName: "utilities") pod "4a255233-e4a3-48ec-b11b-ae56d8815fe7" (UID: "4a255233-e4a3-48ec-b11b-ae56d8815fe7"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 15:27:42 crc kubenswrapper[4869]: I1001 15:27:42.878332 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4a255233-e4a3-48ec-b11b-ae56d8815fe7-kube-api-access-6h8cg" (OuterVolumeSpecName: "kube-api-access-6h8cg") pod "4a255233-e4a3-48ec-b11b-ae56d8815fe7" (UID: "4a255233-e4a3-48ec-b11b-ae56d8815fe7"). InnerVolumeSpecName "kube-api-access-6h8cg". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:27:42 crc kubenswrapper[4869]: I1001 15:27:42.942544 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4a255233-e4a3-48ec-b11b-ae56d8815fe7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4a255233-e4a3-48ec-b11b-ae56d8815fe7" (UID: "4a255233-e4a3-48ec-b11b-ae56d8815fe7"). 
InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 15:27:42 crc kubenswrapper[4869]: I1001 15:27:42.977520 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6h8cg\" (UniqueName: \"kubernetes.io/projected/4a255233-e4a3-48ec-b11b-ae56d8815fe7-kube-api-access-6h8cg\") on node \"crc\" DevicePath \"\"" Oct 01 15:27:42 crc kubenswrapper[4869]: I1001 15:27:42.977580 4869 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4a255233-e4a3-48ec-b11b-ae56d8815fe7-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 15:27:42 crc kubenswrapper[4869]: I1001 15:27:42.977599 4869 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4a255233-e4a3-48ec-b11b-ae56d8815fe7-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 15:27:43 crc kubenswrapper[4869]: I1001 15:27:43.310145 4869 generic.go:334] "Generic (PLEG): container finished" podID="4a255233-e4a3-48ec-b11b-ae56d8815fe7" containerID="071357a22860f182dfe549707b9223a8b8a2e03fa46ce91c75e3ddfd13edfb0a" exitCode=0 Oct 01 15:27:43 crc kubenswrapper[4869]: I1001 15:27:43.310207 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jl5mj" event={"ID":"4a255233-e4a3-48ec-b11b-ae56d8815fe7","Type":"ContainerDied","Data":"071357a22860f182dfe549707b9223a8b8a2e03fa46ce91c75e3ddfd13edfb0a"} Oct 01 15:27:43 crc kubenswrapper[4869]: I1001 15:27:43.310244 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jl5mj" event={"ID":"4a255233-e4a3-48ec-b11b-ae56d8815fe7","Type":"ContainerDied","Data":"6f66af390e450155e58e4c7bcb76a6391ec90da17126cbfa0460b1b5ef2360f4"} Oct 01 15:27:43 crc kubenswrapper[4869]: I1001 15:27:43.310289 4869 scope.go:117] "RemoveContainer" containerID="071357a22860f182dfe549707b9223a8b8a2e03fa46ce91c75e3ddfd13edfb0a" Oct 01 15:27:43 crc kubenswrapper[4869]: I1001 15:27:43.310325 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-jl5mj" Oct 01 15:27:43 crc kubenswrapper[4869]: I1001 15:27:43.334295 4869 scope.go:117] "RemoveContainer" containerID="d7a8a6b47349f97f2c2279b5c95cde32afdac2784ed7b9ccfd594b28071ea7ba" Oct 01 15:27:43 crc kubenswrapper[4869]: I1001 15:27:43.359189 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-jl5mj"] Oct 01 15:27:43 crc kubenswrapper[4869]: I1001 15:27:43.370419 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-jl5mj"] Oct 01 15:27:43 crc kubenswrapper[4869]: I1001 15:27:43.375437 4869 scope.go:117] "RemoveContainer" containerID="c86c2427bb649ee20fed3a908618e1c750f0298b5329e6d5829b5fa5198ba18c" Oct 01 15:27:43 crc kubenswrapper[4869]: I1001 15:27:43.404593 4869 scope.go:117] "RemoveContainer" containerID="071357a22860f182dfe549707b9223a8b8a2e03fa46ce91c75e3ddfd13edfb0a" Oct 01 15:27:43 crc kubenswrapper[4869]: E1001 15:27:43.405100 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"071357a22860f182dfe549707b9223a8b8a2e03fa46ce91c75e3ddfd13edfb0a\": container with ID starting with 071357a22860f182dfe549707b9223a8b8a2e03fa46ce91c75e3ddfd13edfb0a not found: ID does not exist" containerID="071357a22860f182dfe549707b9223a8b8a2e03fa46ce91c75e3ddfd13edfb0a" Oct 01 15:27:43 crc kubenswrapper[4869]: I1001 15:27:43.405140 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"071357a22860f182dfe549707b9223a8b8a2e03fa46ce91c75e3ddfd13edfb0a"} err="failed to get container status \"071357a22860f182dfe549707b9223a8b8a2e03fa46ce91c75e3ddfd13edfb0a\": rpc error: code = NotFound desc = could not find container \"071357a22860f182dfe549707b9223a8b8a2e03fa46ce91c75e3ddfd13edfb0a\": container with ID starting with 071357a22860f182dfe549707b9223a8b8a2e03fa46ce91c75e3ddfd13edfb0a not found: ID does not exist" Oct 01 15:27:43 crc kubenswrapper[4869]: I1001 15:27:43.405167 4869 scope.go:117] "RemoveContainer" containerID="d7a8a6b47349f97f2c2279b5c95cde32afdac2784ed7b9ccfd594b28071ea7ba" Oct 01 15:27:43 crc kubenswrapper[4869]: E1001 15:27:43.405616 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d7a8a6b47349f97f2c2279b5c95cde32afdac2784ed7b9ccfd594b28071ea7ba\": container with ID starting with d7a8a6b47349f97f2c2279b5c95cde32afdac2784ed7b9ccfd594b28071ea7ba not found: ID does not exist" containerID="d7a8a6b47349f97f2c2279b5c95cde32afdac2784ed7b9ccfd594b28071ea7ba" Oct 01 15:27:43 crc kubenswrapper[4869]: I1001 15:27:43.405667 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d7a8a6b47349f97f2c2279b5c95cde32afdac2784ed7b9ccfd594b28071ea7ba"} err="failed to get container status \"d7a8a6b47349f97f2c2279b5c95cde32afdac2784ed7b9ccfd594b28071ea7ba\": rpc error: code = NotFound desc = could not find container \"d7a8a6b47349f97f2c2279b5c95cde32afdac2784ed7b9ccfd594b28071ea7ba\": container with ID starting with d7a8a6b47349f97f2c2279b5c95cde32afdac2784ed7b9ccfd594b28071ea7ba not found: ID does not exist" Oct 01 15:27:43 crc kubenswrapper[4869]: I1001 15:27:43.405708 4869 scope.go:117] "RemoveContainer" containerID="c86c2427bb649ee20fed3a908618e1c750f0298b5329e6d5829b5fa5198ba18c" Oct 01 15:27:43 crc kubenswrapper[4869]: E1001 15:27:43.406120 4869 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"c86c2427bb649ee20fed3a908618e1c750f0298b5329e6d5829b5fa5198ba18c\": container with ID starting with c86c2427bb649ee20fed3a908618e1c750f0298b5329e6d5829b5fa5198ba18c not found: ID does not exist" containerID="c86c2427bb649ee20fed3a908618e1c750f0298b5329e6d5829b5fa5198ba18c" Oct 01 15:27:43 crc kubenswrapper[4869]: I1001 15:27:43.406147 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c86c2427bb649ee20fed3a908618e1c750f0298b5329e6d5829b5fa5198ba18c"} err="failed to get container status \"c86c2427bb649ee20fed3a908618e1c750f0298b5329e6d5829b5fa5198ba18c\": rpc error: code = NotFound desc = could not find container \"c86c2427bb649ee20fed3a908618e1c750f0298b5329e6d5829b5fa5198ba18c\": container with ID starting with c86c2427bb649ee20fed3a908618e1c750f0298b5329e6d5829b5fa5198ba18c not found: ID does not exist" Oct 01 15:27:43 crc kubenswrapper[4869]: I1001 15:27:43.591280 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4a255233-e4a3-48ec-b11b-ae56d8815fe7" path="/var/lib/kubelet/pods/4a255233-e4a3-48ec-b11b-ae56d8815fe7/volumes" Oct 01 15:28:13 crc kubenswrapper[4869]: I1001 15:28:13.354464 4869 patch_prober.go:28] interesting pod/machine-config-daemon-c86m8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 15:28:13 crc kubenswrapper[4869]: I1001 15:28:13.355057 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 15:28:43 crc kubenswrapper[4869]: I1001 15:28:43.354232 4869 patch_prober.go:28] interesting pod/machine-config-daemon-c86m8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 15:28:43 crc kubenswrapper[4869]: I1001 15:28:43.354724 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 15:28:53 crc kubenswrapper[4869]: I1001 15:28:53.073651 4869 generic.go:334] "Generic (PLEG): container finished" podID="23e89dd5-daa6-4174-90e3-a9a3a84dde66" containerID="7cf48ea29ff8f9ec58969e4312cb5b3e8ce8120ddd5c2843d36031b5c1942727" exitCode=0 Oct 01 15:28:53 crc kubenswrapper[4869]: I1001 15:28:53.073745 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-ph2hh" event={"ID":"23e89dd5-daa6-4174-90e3-a9a3a84dde66","Type":"ContainerDied","Data":"7cf48ea29ff8f9ec58969e4312cb5b3e8ce8120ddd5c2843d36031b5c1942727"} Oct 01 15:28:54 crc kubenswrapper[4869]: I1001 15:28:54.524332 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-ph2hh" Oct 01 15:28:54 crc kubenswrapper[4869]: I1001 15:28:54.673119 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vl258\" (UniqueName: \"kubernetes.io/projected/23e89dd5-daa6-4174-90e3-a9a3a84dde66-kube-api-access-vl258\") pod \"23e89dd5-daa6-4174-90e3-a9a3a84dde66\" (UID: \"23e89dd5-daa6-4174-90e3-a9a3a84dde66\") " Oct 01 15:28:54 crc kubenswrapper[4869]: I1001 15:28:54.673342 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/23e89dd5-daa6-4174-90e3-a9a3a84dde66-ssh-key\") pod \"23e89dd5-daa6-4174-90e3-a9a3a84dde66\" (UID: \"23e89dd5-daa6-4174-90e3-a9a3a84dde66\") " Oct 01 15:28:54 crc kubenswrapper[4869]: I1001 15:28:54.673717 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/23e89dd5-daa6-4174-90e3-a9a3a84dde66-inventory\") pod \"23e89dd5-daa6-4174-90e3-a9a3a84dde66\" (UID: \"23e89dd5-daa6-4174-90e3-a9a3a84dde66\") " Oct 01 15:28:54 crc kubenswrapper[4869]: I1001 15:28:54.680708 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/23e89dd5-daa6-4174-90e3-a9a3a84dde66-kube-api-access-vl258" (OuterVolumeSpecName: "kube-api-access-vl258") pod "23e89dd5-daa6-4174-90e3-a9a3a84dde66" (UID: "23e89dd5-daa6-4174-90e3-a9a3a84dde66"). InnerVolumeSpecName "kube-api-access-vl258". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:28:54 crc kubenswrapper[4869]: I1001 15:28:54.715026 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23e89dd5-daa6-4174-90e3-a9a3a84dde66-inventory" (OuterVolumeSpecName: "inventory") pod "23e89dd5-daa6-4174-90e3-a9a3a84dde66" (UID: "23e89dd5-daa6-4174-90e3-a9a3a84dde66"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:28:54 crc kubenswrapper[4869]: I1001 15:28:54.729368 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23e89dd5-daa6-4174-90e3-a9a3a84dde66-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "23e89dd5-daa6-4174-90e3-a9a3a84dde66" (UID: "23e89dd5-daa6-4174-90e3-a9a3a84dde66"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:28:54 crc kubenswrapper[4869]: I1001 15:28:54.775886 4869 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/23e89dd5-daa6-4174-90e3-a9a3a84dde66-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 01 15:28:54 crc kubenswrapper[4869]: I1001 15:28:54.775931 4869 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/23e89dd5-daa6-4174-90e3-a9a3a84dde66-inventory\") on node \"crc\" DevicePath \"\"" Oct 01 15:28:54 crc kubenswrapper[4869]: I1001 15:28:54.775945 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vl258\" (UniqueName: \"kubernetes.io/projected/23e89dd5-daa6-4174-90e3-a9a3a84dde66-kube-api-access-vl258\") on node \"crc\" DevicePath \"\"" Oct 01 15:28:55 crc kubenswrapper[4869]: I1001 15:28:55.097846 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-ph2hh" event={"ID":"23e89dd5-daa6-4174-90e3-a9a3a84dde66","Type":"ContainerDied","Data":"df10c4a089b9e27111fdb98decc02a3a60b74d863ec43a542430a3a03ac93fb2"} Oct 01 15:28:55 crc kubenswrapper[4869]: I1001 15:28:55.098209 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="df10c4a089b9e27111fdb98decc02a3a60b74d863ec43a542430a3a03ac93fb2" Oct 01 15:28:55 crc kubenswrapper[4869]: I1001 15:28:55.097999 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-ph2hh" Oct 01 15:28:55 crc kubenswrapper[4869]: I1001 15:28:55.199650 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-68jb4"] Oct 01 15:28:55 crc kubenswrapper[4869]: E1001 15:28:55.200133 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="90506aa0-758d-473b-bf87-2009e78731ac" containerName="extract-utilities" Oct 01 15:28:55 crc kubenswrapper[4869]: I1001 15:28:55.200160 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="90506aa0-758d-473b-bf87-2009e78731ac" containerName="extract-utilities" Oct 01 15:28:55 crc kubenswrapper[4869]: E1001 15:28:55.200184 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="90506aa0-758d-473b-bf87-2009e78731ac" containerName="extract-content" Oct 01 15:28:55 crc kubenswrapper[4869]: I1001 15:28:55.200192 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="90506aa0-758d-473b-bf87-2009e78731ac" containerName="extract-content" Oct 01 15:28:55 crc kubenswrapper[4869]: E1001 15:28:55.200210 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="90506aa0-758d-473b-bf87-2009e78731ac" containerName="registry-server" Oct 01 15:28:55 crc kubenswrapper[4869]: I1001 15:28:55.200220 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="90506aa0-758d-473b-bf87-2009e78731ac" containerName="registry-server" Oct 01 15:28:55 crc kubenswrapper[4869]: E1001 15:28:55.200243 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="23e89dd5-daa6-4174-90e3-a9a3a84dde66" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Oct 01 15:28:55 crc kubenswrapper[4869]: I1001 15:28:55.200253 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="23e89dd5-daa6-4174-90e3-a9a3a84dde66" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Oct 01 15:28:55 crc kubenswrapper[4869]: E1001 15:28:55.200284 4869 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a255233-e4a3-48ec-b11b-ae56d8815fe7" containerName="registry-server" Oct 01 15:28:55 crc kubenswrapper[4869]: I1001 15:28:55.200294 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a255233-e4a3-48ec-b11b-ae56d8815fe7" containerName="registry-server" Oct 01 15:28:55 crc kubenswrapper[4869]: E1001 15:28:55.200310 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a255233-e4a3-48ec-b11b-ae56d8815fe7" containerName="extract-utilities" Oct 01 15:28:55 crc kubenswrapper[4869]: I1001 15:28:55.200317 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a255233-e4a3-48ec-b11b-ae56d8815fe7" containerName="extract-utilities" Oct 01 15:28:55 crc kubenswrapper[4869]: E1001 15:28:55.200339 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a255233-e4a3-48ec-b11b-ae56d8815fe7" containerName="extract-content" Oct 01 15:28:55 crc kubenswrapper[4869]: I1001 15:28:55.200346 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a255233-e4a3-48ec-b11b-ae56d8815fe7" containerName="extract-content" Oct 01 15:28:55 crc kubenswrapper[4869]: I1001 15:28:55.200546 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="90506aa0-758d-473b-bf87-2009e78731ac" containerName="registry-server" Oct 01 15:28:55 crc kubenswrapper[4869]: I1001 15:28:55.200572 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="4a255233-e4a3-48ec-b11b-ae56d8815fe7" containerName="registry-server" Oct 01 15:28:55 crc kubenswrapper[4869]: I1001 15:28:55.200591 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="23e89dd5-daa6-4174-90e3-a9a3a84dde66" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Oct 01 15:28:55 crc kubenswrapper[4869]: I1001 15:28:55.201434 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-68jb4" Oct 01 15:28:55 crc kubenswrapper[4869]: I1001 15:28:55.203074 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-cjg8g" Oct 01 15:28:55 crc kubenswrapper[4869]: I1001 15:28:55.204033 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 01 15:28:55 crc kubenswrapper[4869]: I1001 15:28:55.204111 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 01 15:28:55 crc kubenswrapper[4869]: I1001 15:28:55.204801 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 01 15:28:55 crc kubenswrapper[4869]: I1001 15:28:55.218975 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-68jb4"] Oct 01 15:28:55 crc kubenswrapper[4869]: I1001 15:28:55.386348 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4741e7d5-5ceb-4699-9d3c-f5798a08af91-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-68jb4\" (UID: \"4741e7d5-5ceb-4699-9d3c-f5798a08af91\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-68jb4" Oct 01 15:28:55 crc kubenswrapper[4869]: I1001 15:28:55.386963 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4741e7d5-5ceb-4699-9d3c-f5798a08af91-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-68jb4\" (UID: \"4741e7d5-5ceb-4699-9d3c-f5798a08af91\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-68jb4" Oct 01 15:28:55 crc kubenswrapper[4869]: I1001 15:28:55.387213 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mlbk6\" (UniqueName: \"kubernetes.io/projected/4741e7d5-5ceb-4699-9d3c-f5798a08af91-kube-api-access-mlbk6\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-68jb4\" (UID: \"4741e7d5-5ceb-4699-9d3c-f5798a08af91\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-68jb4" Oct 01 15:28:55 crc kubenswrapper[4869]: I1001 15:28:55.489143 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4741e7d5-5ceb-4699-9d3c-f5798a08af91-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-68jb4\" (UID: \"4741e7d5-5ceb-4699-9d3c-f5798a08af91\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-68jb4" Oct 01 15:28:55 crc kubenswrapper[4869]: I1001 15:28:55.489212 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mlbk6\" (UniqueName: \"kubernetes.io/projected/4741e7d5-5ceb-4699-9d3c-f5798a08af91-kube-api-access-mlbk6\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-68jb4\" (UID: \"4741e7d5-5ceb-4699-9d3c-f5798a08af91\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-68jb4" Oct 01 15:28:55 crc kubenswrapper[4869]: I1001 15:28:55.489336 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4741e7d5-5ceb-4699-9d3c-f5798a08af91-ssh-key\") pod 
\"validate-network-edpm-deployment-openstack-edpm-ipam-68jb4\" (UID: \"4741e7d5-5ceb-4699-9d3c-f5798a08af91\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-68jb4" Oct 01 15:28:55 crc kubenswrapper[4869]: I1001 15:28:55.493615 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4741e7d5-5ceb-4699-9d3c-f5798a08af91-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-68jb4\" (UID: \"4741e7d5-5ceb-4699-9d3c-f5798a08af91\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-68jb4" Oct 01 15:28:55 crc kubenswrapper[4869]: I1001 15:28:55.494303 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4741e7d5-5ceb-4699-9d3c-f5798a08af91-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-68jb4\" (UID: \"4741e7d5-5ceb-4699-9d3c-f5798a08af91\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-68jb4" Oct 01 15:28:55 crc kubenswrapper[4869]: I1001 15:28:55.504575 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mlbk6\" (UniqueName: \"kubernetes.io/projected/4741e7d5-5ceb-4699-9d3c-f5798a08af91-kube-api-access-mlbk6\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-68jb4\" (UID: \"4741e7d5-5ceb-4699-9d3c-f5798a08af91\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-68jb4" Oct 01 15:28:55 crc kubenswrapper[4869]: I1001 15:28:55.518023 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-68jb4" Oct 01 15:28:56 crc kubenswrapper[4869]: I1001 15:28:56.084656 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-68jb4"] Oct 01 15:28:56 crc kubenswrapper[4869]: W1001 15:28:56.086036 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4741e7d5_5ceb_4699_9d3c_f5798a08af91.slice/crio-fd8f0047ec1fa2dc9a2f6a7f66237563122e261805d6ae89c175475b0d4a58a8 WatchSource:0}: Error finding container fd8f0047ec1fa2dc9a2f6a7f66237563122e261805d6ae89c175475b0d4a58a8: Status 404 returned error can't find the container with id fd8f0047ec1fa2dc9a2f6a7f66237563122e261805d6ae89c175475b0d4a58a8 Oct 01 15:28:56 crc kubenswrapper[4869]: I1001 15:28:56.111787 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-68jb4" event={"ID":"4741e7d5-5ceb-4699-9d3c-f5798a08af91","Type":"ContainerStarted","Data":"fd8f0047ec1fa2dc9a2f6a7f66237563122e261805d6ae89c175475b0d4a58a8"} Oct 01 15:28:57 crc kubenswrapper[4869]: I1001 15:28:57.122637 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-68jb4" event={"ID":"4741e7d5-5ceb-4699-9d3c-f5798a08af91","Type":"ContainerStarted","Data":"a4b63cb3162da03b91af5d7cae161e2c3e36b6c7ff7c72dd1638a5e775113df3"} Oct 01 15:28:58 crc kubenswrapper[4869]: I1001 15:28:58.167240 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-68jb4" podStartSLOduration=2.496270992 podStartE2EDuration="3.167209879s" podCreationTimestamp="2025-10-01 15:28:55 +0000 UTC" firstStartedPulling="2025-10-01 15:28:56.089486128 +0000 UTC m=+1445.236329264" 
lastFinishedPulling="2025-10-01 15:28:56.760425035 +0000 UTC m=+1445.907268151" observedRunningTime="2025-10-01 15:28:58.157379192 +0000 UTC m=+1447.304222388" watchObservedRunningTime="2025-10-01 15:28:58.167209879 +0000 UTC m=+1447.314053035" Oct 01 15:29:01 crc kubenswrapper[4869]: I1001 15:29:01.051668 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-q7p4t"] Oct 01 15:29:01 crc kubenswrapper[4869]: I1001 15:29:01.062793 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-q7p4t"] Oct 01 15:29:01 crc kubenswrapper[4869]: I1001 15:29:01.602957 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01bcad38-d3c2-42c6-8895-366eb4e48bb0" path="/var/lib/kubelet/pods/01bcad38-d3c2-42c6-8895-366eb4e48bb0/volumes" Oct 01 15:29:02 crc kubenswrapper[4869]: I1001 15:29:02.172790 4869 generic.go:334] "Generic (PLEG): container finished" podID="4741e7d5-5ceb-4699-9d3c-f5798a08af91" containerID="a4b63cb3162da03b91af5d7cae161e2c3e36b6c7ff7c72dd1638a5e775113df3" exitCode=0 Oct 01 15:29:02 crc kubenswrapper[4869]: I1001 15:29:02.172920 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-68jb4" event={"ID":"4741e7d5-5ceb-4699-9d3c-f5798a08af91","Type":"ContainerDied","Data":"a4b63cb3162da03b91af5d7cae161e2c3e36b6c7ff7c72dd1638a5e775113df3"} Oct 01 15:29:03 crc kubenswrapper[4869]: I1001 15:29:03.567383 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-68jb4" Oct 01 15:29:03 crc kubenswrapper[4869]: I1001 15:29:03.648820 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mlbk6\" (UniqueName: \"kubernetes.io/projected/4741e7d5-5ceb-4699-9d3c-f5798a08af91-kube-api-access-mlbk6\") pod \"4741e7d5-5ceb-4699-9d3c-f5798a08af91\" (UID: \"4741e7d5-5ceb-4699-9d3c-f5798a08af91\") " Oct 01 15:29:03 crc kubenswrapper[4869]: I1001 15:29:03.648947 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4741e7d5-5ceb-4699-9d3c-f5798a08af91-ssh-key\") pod \"4741e7d5-5ceb-4699-9d3c-f5798a08af91\" (UID: \"4741e7d5-5ceb-4699-9d3c-f5798a08af91\") " Oct 01 15:29:03 crc kubenswrapper[4869]: I1001 15:29:03.649131 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4741e7d5-5ceb-4699-9d3c-f5798a08af91-inventory\") pod \"4741e7d5-5ceb-4699-9d3c-f5798a08af91\" (UID: \"4741e7d5-5ceb-4699-9d3c-f5798a08af91\") " Oct 01 15:29:03 crc kubenswrapper[4869]: I1001 15:29:03.655417 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4741e7d5-5ceb-4699-9d3c-f5798a08af91-kube-api-access-mlbk6" (OuterVolumeSpecName: "kube-api-access-mlbk6") pod "4741e7d5-5ceb-4699-9d3c-f5798a08af91" (UID: "4741e7d5-5ceb-4699-9d3c-f5798a08af91"). InnerVolumeSpecName "kube-api-access-mlbk6". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:29:03 crc kubenswrapper[4869]: I1001 15:29:03.676468 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4741e7d5-5ceb-4699-9d3c-f5798a08af91-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "4741e7d5-5ceb-4699-9d3c-f5798a08af91" (UID: "4741e7d5-5ceb-4699-9d3c-f5798a08af91"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:29:03 crc kubenswrapper[4869]: I1001 15:29:03.683077 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4741e7d5-5ceb-4699-9d3c-f5798a08af91-inventory" (OuterVolumeSpecName: "inventory") pod "4741e7d5-5ceb-4699-9d3c-f5798a08af91" (UID: "4741e7d5-5ceb-4699-9d3c-f5798a08af91"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:29:03 crc kubenswrapper[4869]: I1001 15:29:03.751766 4869 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4741e7d5-5ceb-4699-9d3c-f5798a08af91-inventory\") on node \"crc\" DevicePath \"\"" Oct 01 15:29:03 crc kubenswrapper[4869]: I1001 15:29:03.751805 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mlbk6\" (UniqueName: \"kubernetes.io/projected/4741e7d5-5ceb-4699-9d3c-f5798a08af91-kube-api-access-mlbk6\") on node \"crc\" DevicePath \"\"" Oct 01 15:29:03 crc kubenswrapper[4869]: I1001 15:29:03.751816 4869 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4741e7d5-5ceb-4699-9d3c-f5798a08af91-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 01 15:29:04 crc kubenswrapper[4869]: I1001 15:29:04.197749 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-68jb4" event={"ID":"4741e7d5-5ceb-4699-9d3c-f5798a08af91","Type":"ContainerDied","Data":"fd8f0047ec1fa2dc9a2f6a7f66237563122e261805d6ae89c175475b0d4a58a8"} Oct 01 15:29:04 crc kubenswrapper[4869]: I1001 15:29:04.197821 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-68jb4" Oct 01 15:29:04 crc kubenswrapper[4869]: I1001 15:29:04.197827 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fd8f0047ec1fa2dc9a2f6a7f66237563122e261805d6ae89c175475b0d4a58a8" Oct 01 15:29:04 crc kubenswrapper[4869]: I1001 15:29:04.296536 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-fwpqm"] Oct 01 15:29:04 crc kubenswrapper[4869]: E1001 15:29:04.297508 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4741e7d5-5ceb-4699-9d3c-f5798a08af91" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Oct 01 15:29:04 crc kubenswrapper[4869]: I1001 15:29:04.297538 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="4741e7d5-5ceb-4699-9d3c-f5798a08af91" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Oct 01 15:29:04 crc kubenswrapper[4869]: I1001 15:29:04.297825 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="4741e7d5-5ceb-4699-9d3c-f5798a08af91" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Oct 01 15:29:04 crc kubenswrapper[4869]: I1001 15:29:04.298825 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-fwpqm" Oct 01 15:29:04 crc kubenswrapper[4869]: I1001 15:29:04.303128 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-cjg8g" Oct 01 15:29:04 crc kubenswrapper[4869]: I1001 15:29:04.303503 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 01 15:29:04 crc kubenswrapper[4869]: I1001 15:29:04.303705 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 01 15:29:04 crc kubenswrapper[4869]: I1001 15:29:04.304284 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 01 15:29:04 crc kubenswrapper[4869]: I1001 15:29:04.310315 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-fwpqm"] Oct 01 15:29:04 crc kubenswrapper[4869]: I1001 15:29:04.365754 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6dc7fd52-a0b5-47e1-9fa7-348ca07979c9-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-fwpqm\" (UID: \"6dc7fd52-a0b5-47e1-9fa7-348ca07979c9\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-fwpqm" Oct 01 15:29:04 crc kubenswrapper[4869]: I1001 15:29:04.365829 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6dc7fd52-a0b5-47e1-9fa7-348ca07979c9-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-fwpqm\" (UID: \"6dc7fd52-a0b5-47e1-9fa7-348ca07979c9\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-fwpqm" Oct 01 15:29:04 crc kubenswrapper[4869]: I1001 15:29:04.365875 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8smz7\" (UniqueName: \"kubernetes.io/projected/6dc7fd52-a0b5-47e1-9fa7-348ca07979c9-kube-api-access-8smz7\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-fwpqm\" (UID: \"6dc7fd52-a0b5-47e1-9fa7-348ca07979c9\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-fwpqm" Oct 01 15:29:04 crc kubenswrapper[4869]: I1001 15:29:04.467735 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6dc7fd52-a0b5-47e1-9fa7-348ca07979c9-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-fwpqm\" (UID: \"6dc7fd52-a0b5-47e1-9fa7-348ca07979c9\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-fwpqm" Oct 01 15:29:04 crc kubenswrapper[4869]: I1001 15:29:04.467788 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6dc7fd52-a0b5-47e1-9fa7-348ca07979c9-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-fwpqm\" (UID: \"6dc7fd52-a0b5-47e1-9fa7-348ca07979c9\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-fwpqm" Oct 01 15:29:04 crc kubenswrapper[4869]: I1001 15:29:04.467816 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8smz7\" (UniqueName: \"kubernetes.io/projected/6dc7fd52-a0b5-47e1-9fa7-348ca07979c9-kube-api-access-8smz7\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-fwpqm\" (UID: 
\"6dc7fd52-a0b5-47e1-9fa7-348ca07979c9\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-fwpqm" Oct 01 15:29:04 crc kubenswrapper[4869]: I1001 15:29:04.474772 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6dc7fd52-a0b5-47e1-9fa7-348ca07979c9-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-fwpqm\" (UID: \"6dc7fd52-a0b5-47e1-9fa7-348ca07979c9\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-fwpqm" Oct 01 15:29:04 crc kubenswrapper[4869]: I1001 15:29:04.478200 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6dc7fd52-a0b5-47e1-9fa7-348ca07979c9-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-fwpqm\" (UID: \"6dc7fd52-a0b5-47e1-9fa7-348ca07979c9\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-fwpqm" Oct 01 15:29:04 crc kubenswrapper[4869]: I1001 15:29:04.491020 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8smz7\" (UniqueName: \"kubernetes.io/projected/6dc7fd52-a0b5-47e1-9fa7-348ca07979c9-kube-api-access-8smz7\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-fwpqm\" (UID: \"6dc7fd52-a0b5-47e1-9fa7-348ca07979c9\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-fwpqm" Oct 01 15:29:04 crc kubenswrapper[4869]: I1001 15:29:04.629780 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-fwpqm" Oct 01 15:29:05 crc kubenswrapper[4869]: I1001 15:29:05.027749 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-mx78s"] Oct 01 15:29:05 crc kubenswrapper[4869]: I1001 15:29:05.036021 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-mx78s"] Oct 01 15:29:05 crc kubenswrapper[4869]: I1001 15:29:05.181283 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-fwpqm"] Oct 01 15:29:05 crc kubenswrapper[4869]: I1001 15:29:05.185655 4869 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 01 15:29:05 crc kubenswrapper[4869]: I1001 15:29:05.208374 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-fwpqm" event={"ID":"6dc7fd52-a0b5-47e1-9fa7-348ca07979c9","Type":"ContainerStarted","Data":"ad888510269bc11d927d1ef4d62dc7a1cf79976b517b7671fc4441cc12951f90"} Oct 01 15:29:05 crc kubenswrapper[4869]: I1001 15:29:05.602704 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="746c5a71-9fcd-42a6-88f6-167edfe66fac" path="/var/lib/kubelet/pods/746c5a71-9fcd-42a6-88f6-167edfe66fac/volumes" Oct 01 15:29:06 crc kubenswrapper[4869]: I1001 15:29:06.035685 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-769h5"] Oct 01 15:29:06 crc kubenswrapper[4869]: I1001 15:29:06.049228 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-769h5"] Oct 01 15:29:06 crc kubenswrapper[4869]: I1001 15:29:06.219084 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-fwpqm" event={"ID":"6dc7fd52-a0b5-47e1-9fa7-348ca07979c9","Type":"ContainerStarted","Data":"01553e2b01891ddc71d4b8a1179cef6c85eeb8220e018db463ba9b08d73df080"} Oct 01 15:29:06 crc kubenswrapper[4869]: 
I1001 15:29:06.238582 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-fwpqm" podStartSLOduration=1.809339707 podStartE2EDuration="2.238562351s" podCreationTimestamp="2025-10-01 15:29:04 +0000 UTC" firstStartedPulling="2025-10-01 15:29:05.185246658 +0000 UTC m=+1454.332089774" lastFinishedPulling="2025-10-01 15:29:05.614469292 +0000 UTC m=+1454.761312418" observedRunningTime="2025-10-01 15:29:06.237689449 +0000 UTC m=+1455.384532605" watchObservedRunningTime="2025-10-01 15:29:06.238562351 +0000 UTC m=+1455.385405467" Oct 01 15:29:07 crc kubenswrapper[4869]: I1001 15:29:07.591609 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6d3a2079-2713-4de2-b3c9-3e3e49d581e0" path="/var/lib/kubelet/pods/6d3a2079-2713-4de2-b3c9-3e3e49d581e0/volumes" Oct 01 15:29:11 crc kubenswrapper[4869]: I1001 15:29:11.043548 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-f8bf-account-create-lvqd8"] Oct 01 15:29:11 crc kubenswrapper[4869]: I1001 15:29:11.050652 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-f8bf-account-create-lvqd8"] Oct 01 15:29:11 crc kubenswrapper[4869]: I1001 15:29:11.598737 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4287832f-bf35-45ec-a6fa-56e07bfca8f7" path="/var/lib/kubelet/pods/4287832f-bf35-45ec-a6fa-56e07bfca8f7/volumes" Oct 01 15:29:12 crc kubenswrapper[4869]: I1001 15:29:12.141740 4869 scope.go:117] "RemoveContainer" containerID="6a9da1c45c82ec75a07d9cac750447de2ee6d6edb616ac97f47672935c430d42" Oct 01 15:29:12 crc kubenswrapper[4869]: I1001 15:29:12.191691 4869 scope.go:117] "RemoveContainer" containerID="f472c64f2e196a6b51cdbf2b4f8b68c5419d7c0d5bc40bf0acff0ce4a2910019" Oct 01 15:29:12 crc kubenswrapper[4869]: I1001 15:29:12.222309 4869 scope.go:117] "RemoveContainer" containerID="3c4caf8ee2cb4970363eb8a019c329bf7d020fc01e7cdd17bb25f60f8c5d9f3a" Oct 01 15:29:12 crc kubenswrapper[4869]: I1001 15:29:12.262782 4869 scope.go:117] "RemoveContainer" containerID="94257e09992c96c437ff35162ebf79331cede5e490be5d690b8c269edc20fb68" Oct 01 15:29:13 crc kubenswrapper[4869]: I1001 15:29:13.354623 4869 patch_prober.go:28] interesting pod/machine-config-daemon-c86m8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 15:29:13 crc kubenswrapper[4869]: I1001 15:29:13.354719 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 15:29:13 crc kubenswrapper[4869]: I1001 15:29:13.354777 4869 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" Oct 01 15:29:13 crc kubenswrapper[4869]: I1001 15:29:13.355667 4869 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"1d80d75dc72da971c79fa8b67243b56fe5690fce0a3f0d0147f32ac22b29db29"} pod="openshift-machine-config-operator/machine-config-daemon-c86m8" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 
01 15:29:13 crc kubenswrapper[4869]: I1001 15:29:13.356205 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" containerID="cri-o://1d80d75dc72da971c79fa8b67243b56fe5690fce0a3f0d0147f32ac22b29db29" gracePeriod=600 Oct 01 15:29:13 crc kubenswrapper[4869]: E1001 15:29:13.486344 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 15:29:14 crc kubenswrapper[4869]: I1001 15:29:14.316655 4869 generic.go:334] "Generic (PLEG): container finished" podID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerID="1d80d75dc72da971c79fa8b67243b56fe5690fce0a3f0d0147f32ac22b29db29" exitCode=0 Oct 01 15:29:14 crc kubenswrapper[4869]: I1001 15:29:14.316704 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" event={"ID":"a4b64f7f-0b03-4f47-965b-9fde048b735c","Type":"ContainerDied","Data":"1d80d75dc72da971c79fa8b67243b56fe5690fce0a3f0d0147f32ac22b29db29"} Oct 01 15:29:14 crc kubenswrapper[4869]: I1001 15:29:14.316740 4869 scope.go:117] "RemoveContainer" containerID="fbafe3c9358f68065a4bc8a44e7f7c6c280ff5086e57618c387c454ac514cc06" Oct 01 15:29:14 crc kubenswrapper[4869]: I1001 15:29:14.318087 4869 scope.go:117] "RemoveContainer" containerID="1d80d75dc72da971c79fa8b67243b56fe5690fce0a3f0d0147f32ac22b29db29" Oct 01 15:29:14 crc kubenswrapper[4869]: E1001 15:29:14.318522 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 15:29:15 crc kubenswrapper[4869]: I1001 15:29:15.029607 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-9562-account-create-rx6kp"] Oct 01 15:29:15 crc kubenswrapper[4869]: I1001 15:29:15.041650 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-9562-account-create-rx6kp"] Oct 01 15:29:15 crc kubenswrapper[4869]: I1001 15:29:15.594576 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a2d8b42f-6c11-4f05-baa2-5252184c1e92" path="/var/lib/kubelet/pods/a2d8b42f-6c11-4f05-baa2-5252184c1e92/volumes" Oct 01 15:29:16 crc kubenswrapper[4869]: I1001 15:29:16.030081 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-b636-account-create-sbzps"] Oct 01 15:29:16 crc kubenswrapper[4869]: I1001 15:29:16.038746 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-b636-account-create-sbzps"] Oct 01 15:29:17 crc kubenswrapper[4869]: I1001 15:29:17.600131 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f7670535-ca76-4d3e-8493-ca7341d7de3f" path="/var/lib/kubelet/pods/f7670535-ca76-4d3e-8493-ca7341d7de3f/volumes" Oct 01 15:29:28 crc kubenswrapper[4869]: I1001 
15:29:28.037913 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-v7rjt"] Oct 01 15:29:28 crc kubenswrapper[4869]: I1001 15:29:28.045126 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-v7rjt"] Oct 01 15:29:28 crc kubenswrapper[4869]: I1001 15:29:28.581748 4869 scope.go:117] "RemoveContainer" containerID="1d80d75dc72da971c79fa8b67243b56fe5690fce0a3f0d0147f32ac22b29db29" Oct 01 15:29:28 crc kubenswrapper[4869]: E1001 15:29:28.582726 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 15:29:29 crc kubenswrapper[4869]: I1001 15:29:29.597851 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="24526b59-4280-4dc6-9980-ea48d0143071" path="/var/lib/kubelet/pods/24526b59-4280-4dc6-9980-ea48d0143071/volumes" Oct 01 15:29:31 crc kubenswrapper[4869]: I1001 15:29:31.033483 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-sdgm5"] Oct 01 15:29:31 crc kubenswrapper[4869]: I1001 15:29:31.040274 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-k6f4h"] Oct 01 15:29:31 crc kubenswrapper[4869]: I1001 15:29:31.047495 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-sdgm5"] Oct 01 15:29:31 crc kubenswrapper[4869]: I1001 15:29:31.053050 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-k6f4h"] Oct 01 15:29:31 crc kubenswrapper[4869]: I1001 15:29:31.601564 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="52104f6a-5c9e-46c9-9197-4015634558a6" path="/var/lib/kubelet/pods/52104f6a-5c9e-46c9-9197-4015634558a6/volumes" Oct 01 15:29:31 crc kubenswrapper[4869]: I1001 15:29:31.602887 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ba3f80b-83bc-4a34-9c76-44fafb693520" path="/var/lib/kubelet/pods/6ba3f80b-83bc-4a34-9c76-44fafb693520/volumes" Oct 01 15:29:33 crc kubenswrapper[4869]: I1001 15:29:33.035576 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-0946-account-create-g5vxh"] Oct 01 15:29:33 crc kubenswrapper[4869]: I1001 15:29:33.046132 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-0946-account-create-g5vxh"] Oct 01 15:29:33 crc kubenswrapper[4869]: I1001 15:29:33.595614 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4937bc6d-5b93-47a8-bbe6-957b30612d76" path="/var/lib/kubelet/pods/4937bc6d-5b93-47a8-bbe6-957b30612d76/volumes" Oct 01 15:29:35 crc kubenswrapper[4869]: I1001 15:29:35.045515 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-d6v2s"] Oct 01 15:29:35 crc kubenswrapper[4869]: I1001 15:29:35.060205 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-d6v2s"] Oct 01 15:29:35 crc kubenswrapper[4869]: I1001 15:29:35.594445 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="16eefc1b-2890-4b09-8f2b-0febecad6523" path="/var/lib/kubelet/pods/16eefc1b-2890-4b09-8f2b-0febecad6523/volumes" Oct 01 15:29:36 crc kubenswrapper[4869]: I1001 15:29:36.044867 4869 
kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-j277p"] Oct 01 15:29:36 crc kubenswrapper[4869]: I1001 15:29:36.062324 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-j277p"] Oct 01 15:29:37 crc kubenswrapper[4869]: I1001 15:29:37.596244 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="92372d1e-b67d-4615-9978-946caec18e59" path="/var/lib/kubelet/pods/92372d1e-b67d-4615-9978-946caec18e59/volumes" Oct 01 15:29:39 crc kubenswrapper[4869]: I1001 15:29:39.581965 4869 scope.go:117] "RemoveContainer" containerID="1d80d75dc72da971c79fa8b67243b56fe5690fce0a3f0d0147f32ac22b29db29" Oct 01 15:29:39 crc kubenswrapper[4869]: E1001 15:29:39.582887 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 15:29:43 crc kubenswrapper[4869]: I1001 15:29:43.629534 4869 generic.go:334] "Generic (PLEG): container finished" podID="6dc7fd52-a0b5-47e1-9fa7-348ca07979c9" containerID="01553e2b01891ddc71d4b8a1179cef6c85eeb8220e018db463ba9b08d73df080" exitCode=0 Oct 01 15:29:43 crc kubenswrapper[4869]: I1001 15:29:43.629650 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-fwpqm" event={"ID":"6dc7fd52-a0b5-47e1-9fa7-348ca07979c9","Type":"ContainerDied","Data":"01553e2b01891ddc71d4b8a1179cef6c85eeb8220e018db463ba9b08d73df080"} Oct 01 15:29:45 crc kubenswrapper[4869]: I1001 15:29:45.107697 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-fwpqm" Oct 01 15:29:45 crc kubenswrapper[4869]: I1001 15:29:45.178291 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8smz7\" (UniqueName: \"kubernetes.io/projected/6dc7fd52-a0b5-47e1-9fa7-348ca07979c9-kube-api-access-8smz7\") pod \"6dc7fd52-a0b5-47e1-9fa7-348ca07979c9\" (UID: \"6dc7fd52-a0b5-47e1-9fa7-348ca07979c9\") " Oct 01 15:29:45 crc kubenswrapper[4869]: I1001 15:29:45.178519 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6dc7fd52-a0b5-47e1-9fa7-348ca07979c9-inventory\") pod \"6dc7fd52-a0b5-47e1-9fa7-348ca07979c9\" (UID: \"6dc7fd52-a0b5-47e1-9fa7-348ca07979c9\") " Oct 01 15:29:45 crc kubenswrapper[4869]: I1001 15:29:45.179840 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6dc7fd52-a0b5-47e1-9fa7-348ca07979c9-ssh-key\") pod \"6dc7fd52-a0b5-47e1-9fa7-348ca07979c9\" (UID: \"6dc7fd52-a0b5-47e1-9fa7-348ca07979c9\") " Oct 01 15:29:45 crc kubenswrapper[4869]: I1001 15:29:45.188558 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6dc7fd52-a0b5-47e1-9fa7-348ca07979c9-kube-api-access-8smz7" (OuterVolumeSpecName: "kube-api-access-8smz7") pod "6dc7fd52-a0b5-47e1-9fa7-348ca07979c9" (UID: "6dc7fd52-a0b5-47e1-9fa7-348ca07979c9"). InnerVolumeSpecName "kube-api-access-8smz7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:29:45 crc kubenswrapper[4869]: I1001 15:29:45.214139 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6dc7fd52-a0b5-47e1-9fa7-348ca07979c9-inventory" (OuterVolumeSpecName: "inventory") pod "6dc7fd52-a0b5-47e1-9fa7-348ca07979c9" (UID: "6dc7fd52-a0b5-47e1-9fa7-348ca07979c9"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:29:45 crc kubenswrapper[4869]: I1001 15:29:45.226759 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6dc7fd52-a0b5-47e1-9fa7-348ca07979c9-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "6dc7fd52-a0b5-47e1-9fa7-348ca07979c9" (UID: "6dc7fd52-a0b5-47e1-9fa7-348ca07979c9"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:29:45 crc kubenswrapper[4869]: I1001 15:29:45.283566 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8smz7\" (UniqueName: \"kubernetes.io/projected/6dc7fd52-a0b5-47e1-9fa7-348ca07979c9-kube-api-access-8smz7\") on node \"crc\" DevicePath \"\"" Oct 01 15:29:45 crc kubenswrapper[4869]: I1001 15:29:45.283636 4869 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6dc7fd52-a0b5-47e1-9fa7-348ca07979c9-inventory\") on node \"crc\" DevicePath \"\"" Oct 01 15:29:45 crc kubenswrapper[4869]: I1001 15:29:45.283667 4869 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6dc7fd52-a0b5-47e1-9fa7-348ca07979c9-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 01 15:29:45 crc kubenswrapper[4869]: I1001 15:29:45.655901 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-fwpqm" event={"ID":"6dc7fd52-a0b5-47e1-9fa7-348ca07979c9","Type":"ContainerDied","Data":"ad888510269bc11d927d1ef4d62dc7a1cf79976b517b7671fc4441cc12951f90"} Oct 01 15:29:45 crc kubenswrapper[4869]: I1001 15:29:45.656245 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ad888510269bc11d927d1ef4d62dc7a1cf79976b517b7671fc4441cc12951f90" Oct 01 15:29:45 crc kubenswrapper[4869]: I1001 15:29:45.656132 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-fwpqm" Oct 01 15:29:45 crc kubenswrapper[4869]: I1001 15:29:45.734579 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-22x79"] Oct 01 15:29:45 crc kubenswrapper[4869]: E1001 15:29:45.735890 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6dc7fd52-a0b5-47e1-9fa7-348ca07979c9" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Oct 01 15:29:45 crc kubenswrapper[4869]: I1001 15:29:45.735934 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="6dc7fd52-a0b5-47e1-9fa7-348ca07979c9" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Oct 01 15:29:45 crc kubenswrapper[4869]: I1001 15:29:45.736135 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="6dc7fd52-a0b5-47e1-9fa7-348ca07979c9" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Oct 01 15:29:45 crc kubenswrapper[4869]: I1001 15:29:45.736887 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-22x79" Oct 01 15:29:45 crc kubenswrapper[4869]: I1001 15:29:45.739116 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 01 15:29:45 crc kubenswrapper[4869]: I1001 15:29:45.739253 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 01 15:29:45 crc kubenswrapper[4869]: I1001 15:29:45.739394 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 01 15:29:45 crc kubenswrapper[4869]: I1001 15:29:45.739638 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-cjg8g" Oct 01 15:29:45 crc kubenswrapper[4869]: I1001 15:29:45.745540 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-22x79"] Oct 01 15:29:45 crc kubenswrapper[4869]: I1001 15:29:45.794392 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zbrhv\" (UniqueName: \"kubernetes.io/projected/74968806-6b3a-4d29-a5ed-49987fafba72-kube-api-access-zbrhv\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-22x79\" (UID: \"74968806-6b3a-4d29-a5ed-49987fafba72\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-22x79" Oct 01 15:29:45 crc kubenswrapper[4869]: I1001 15:29:45.794602 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/74968806-6b3a-4d29-a5ed-49987fafba72-ssh-key\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-22x79\" (UID: \"74968806-6b3a-4d29-a5ed-49987fafba72\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-22x79" Oct 01 15:29:45 crc kubenswrapper[4869]: I1001 15:29:45.794782 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/74968806-6b3a-4d29-a5ed-49987fafba72-inventory\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-22x79\" (UID: \"74968806-6b3a-4d29-a5ed-49987fafba72\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-22x79" Oct 01 15:29:45 crc kubenswrapper[4869]: I1001 15:29:45.896576 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zbrhv\" (UniqueName: \"kubernetes.io/projected/74968806-6b3a-4d29-a5ed-49987fafba72-kube-api-access-zbrhv\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-22x79\" (UID: \"74968806-6b3a-4d29-a5ed-49987fafba72\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-22x79" Oct 01 15:29:45 crc kubenswrapper[4869]: I1001 15:29:45.896724 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/74968806-6b3a-4d29-a5ed-49987fafba72-ssh-key\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-22x79\" (UID: \"74968806-6b3a-4d29-a5ed-49987fafba72\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-22x79" Oct 01 15:29:45 crc kubenswrapper[4869]: I1001 15:29:45.896798 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/74968806-6b3a-4d29-a5ed-49987fafba72-inventory\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-22x79\" 
(UID: \"74968806-6b3a-4d29-a5ed-49987fafba72\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-22x79" Oct 01 15:29:45 crc kubenswrapper[4869]: I1001 15:29:45.902033 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/74968806-6b3a-4d29-a5ed-49987fafba72-inventory\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-22x79\" (UID: \"74968806-6b3a-4d29-a5ed-49987fafba72\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-22x79" Oct 01 15:29:45 crc kubenswrapper[4869]: I1001 15:29:45.903163 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/74968806-6b3a-4d29-a5ed-49987fafba72-ssh-key\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-22x79\" (UID: \"74968806-6b3a-4d29-a5ed-49987fafba72\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-22x79" Oct 01 15:29:45 crc kubenswrapper[4869]: I1001 15:29:45.918502 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zbrhv\" (UniqueName: \"kubernetes.io/projected/74968806-6b3a-4d29-a5ed-49987fafba72-kube-api-access-zbrhv\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-22x79\" (UID: \"74968806-6b3a-4d29-a5ed-49987fafba72\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-22x79" Oct 01 15:29:46 crc kubenswrapper[4869]: I1001 15:29:46.057917 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-22x79" Oct 01 15:29:46 crc kubenswrapper[4869]: I1001 15:29:46.585656 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-22x79"] Oct 01 15:29:46 crc kubenswrapper[4869]: I1001 15:29:46.675983 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-22x79" event={"ID":"74968806-6b3a-4d29-a5ed-49987fafba72","Type":"ContainerStarted","Data":"4f74af86ee591cc917345a8e45e00e0a2530ca380bb3f6a26c07101a8e85ce4d"} Oct 01 15:29:47 crc kubenswrapper[4869]: I1001 15:29:47.688546 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-22x79" event={"ID":"74968806-6b3a-4d29-a5ed-49987fafba72","Type":"ContainerStarted","Data":"40f587a480a7c39a6196a0a54aab2ff69baf31668f21bcaffb8dff340cdd4d67"} Oct 01 15:29:47 crc kubenswrapper[4869]: I1001 15:29:47.722513 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-22x79" podStartSLOduration=2.285420689 podStartE2EDuration="2.72248821s" podCreationTimestamp="2025-10-01 15:29:45 +0000 UTC" firstStartedPulling="2025-10-01 15:29:46.592772657 +0000 UTC m=+1495.739615773" lastFinishedPulling="2025-10-01 15:29:47.029840168 +0000 UTC m=+1496.176683294" observedRunningTime="2025-10-01 15:29:47.711531895 +0000 UTC m=+1496.858375111" watchObservedRunningTime="2025-10-01 15:29:47.72248821 +0000 UTC m=+1496.869331366" Oct 01 15:29:51 crc kubenswrapper[4869]: I1001 15:29:51.591613 4869 scope.go:117] "RemoveContainer" containerID="1d80d75dc72da971c79fa8b67243b56fe5690fce0a3f0d0147f32ac22b29db29" Oct 01 15:29:51 crc kubenswrapper[4869]: E1001 15:29:51.592900 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 15:29:51 crc kubenswrapper[4869]: I1001 15:29:51.728899 4869 generic.go:334] "Generic (PLEG): container finished" podID="74968806-6b3a-4d29-a5ed-49987fafba72" containerID="40f587a480a7c39a6196a0a54aab2ff69baf31668f21bcaffb8dff340cdd4d67" exitCode=0 Oct 01 15:29:51 crc kubenswrapper[4869]: I1001 15:29:51.729288 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-22x79" event={"ID":"74968806-6b3a-4d29-a5ed-49987fafba72","Type":"ContainerDied","Data":"40f587a480a7c39a6196a0a54aab2ff69baf31668f21bcaffb8dff340cdd4d67"} Oct 01 15:29:53 crc kubenswrapper[4869]: I1001 15:29:53.195330 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-22x79" Oct 01 15:29:53 crc kubenswrapper[4869]: I1001 15:29:53.249570 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/74968806-6b3a-4d29-a5ed-49987fafba72-inventory\") pod \"74968806-6b3a-4d29-a5ed-49987fafba72\" (UID: \"74968806-6b3a-4d29-a5ed-49987fafba72\") " Oct 01 15:29:53 crc kubenswrapper[4869]: I1001 15:29:53.249634 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zbrhv\" (UniqueName: \"kubernetes.io/projected/74968806-6b3a-4d29-a5ed-49987fafba72-kube-api-access-zbrhv\") pod \"74968806-6b3a-4d29-a5ed-49987fafba72\" (UID: \"74968806-6b3a-4d29-a5ed-49987fafba72\") " Oct 01 15:29:53 crc kubenswrapper[4869]: I1001 15:29:53.249658 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/74968806-6b3a-4d29-a5ed-49987fafba72-ssh-key\") pod \"74968806-6b3a-4d29-a5ed-49987fafba72\" (UID: \"74968806-6b3a-4d29-a5ed-49987fafba72\") " Oct 01 15:29:53 crc kubenswrapper[4869]: I1001 15:29:53.257486 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/74968806-6b3a-4d29-a5ed-49987fafba72-kube-api-access-zbrhv" (OuterVolumeSpecName: "kube-api-access-zbrhv") pod "74968806-6b3a-4d29-a5ed-49987fafba72" (UID: "74968806-6b3a-4d29-a5ed-49987fafba72"). InnerVolumeSpecName "kube-api-access-zbrhv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:29:53 crc kubenswrapper[4869]: I1001 15:29:53.278706 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/74968806-6b3a-4d29-a5ed-49987fafba72-inventory" (OuterVolumeSpecName: "inventory") pod "74968806-6b3a-4d29-a5ed-49987fafba72" (UID: "74968806-6b3a-4d29-a5ed-49987fafba72"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:29:53 crc kubenswrapper[4869]: I1001 15:29:53.293443 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/74968806-6b3a-4d29-a5ed-49987fafba72-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "74968806-6b3a-4d29-a5ed-49987fafba72" (UID: "74968806-6b3a-4d29-a5ed-49987fafba72"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:29:53 crc kubenswrapper[4869]: I1001 15:29:53.355345 4869 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/74968806-6b3a-4d29-a5ed-49987fafba72-inventory\") on node \"crc\" DevicePath \"\"" Oct 01 15:29:53 crc kubenswrapper[4869]: I1001 15:29:53.355383 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zbrhv\" (UniqueName: \"kubernetes.io/projected/74968806-6b3a-4d29-a5ed-49987fafba72-kube-api-access-zbrhv\") on node \"crc\" DevicePath \"\"" Oct 01 15:29:53 crc kubenswrapper[4869]: I1001 15:29:53.355398 4869 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/74968806-6b3a-4d29-a5ed-49987fafba72-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 01 15:29:53 crc kubenswrapper[4869]: I1001 15:29:53.759678 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-22x79" event={"ID":"74968806-6b3a-4d29-a5ed-49987fafba72","Type":"ContainerDied","Data":"4f74af86ee591cc917345a8e45e00e0a2530ca380bb3f6a26c07101a8e85ce4d"} Oct 01 15:29:53 crc kubenswrapper[4869]: I1001 15:29:53.760221 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4f74af86ee591cc917345a8e45e00e0a2530ca380bb3f6a26c07101a8e85ce4d" Oct 01 15:29:53 crc kubenswrapper[4869]: I1001 15:29:53.759740 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-22x79" Oct 01 15:29:53 crc kubenswrapper[4869]: I1001 15:29:53.887527 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-l2t45"] Oct 01 15:29:53 crc kubenswrapper[4869]: E1001 15:29:53.888120 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="74968806-6b3a-4d29-a5ed-49987fafba72" containerName="ceph-hci-pre-edpm-deployment-openstack-edpm-ipam" Oct 01 15:29:53 crc kubenswrapper[4869]: I1001 15:29:53.888144 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="74968806-6b3a-4d29-a5ed-49987fafba72" containerName="ceph-hci-pre-edpm-deployment-openstack-edpm-ipam" Oct 01 15:29:53 crc kubenswrapper[4869]: I1001 15:29:53.888397 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="74968806-6b3a-4d29-a5ed-49987fafba72" containerName="ceph-hci-pre-edpm-deployment-openstack-edpm-ipam" Oct 01 15:29:53 crc kubenswrapper[4869]: I1001 15:29:53.892448 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-l2t45" Oct 01 15:29:53 crc kubenswrapper[4869]: I1001 15:29:53.897371 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 01 15:29:53 crc kubenswrapper[4869]: I1001 15:29:53.899108 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-cjg8g" Oct 01 15:29:53 crc kubenswrapper[4869]: I1001 15:29:53.899384 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 01 15:29:53 crc kubenswrapper[4869]: I1001 15:29:53.903123 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 01 15:29:53 crc kubenswrapper[4869]: I1001 15:29:53.940537 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-l2t45"] Oct 01 15:29:53 crc kubenswrapper[4869]: I1001 15:29:53.968427 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b21a8f1c-ddea-4663-ba39-f2c75c93acec-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-l2t45\" (UID: \"b21a8f1c-ddea-4663-ba39-f2c75c93acec\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-l2t45" Oct 01 15:29:53 crc kubenswrapper[4869]: I1001 15:29:53.968529 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-55qxm\" (UniqueName: \"kubernetes.io/projected/b21a8f1c-ddea-4663-ba39-f2c75c93acec-kube-api-access-55qxm\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-l2t45\" (UID: \"b21a8f1c-ddea-4663-ba39-f2c75c93acec\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-l2t45" Oct 01 15:29:53 crc kubenswrapper[4869]: I1001 15:29:53.968597 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b21a8f1c-ddea-4663-ba39-f2c75c93acec-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-l2t45\" (UID: \"b21a8f1c-ddea-4663-ba39-f2c75c93acec\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-l2t45" Oct 01 15:29:54 crc kubenswrapper[4869]: I1001 15:29:54.070251 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-55qxm\" (UniqueName: \"kubernetes.io/projected/b21a8f1c-ddea-4663-ba39-f2c75c93acec-kube-api-access-55qxm\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-l2t45\" (UID: \"b21a8f1c-ddea-4663-ba39-f2c75c93acec\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-l2t45" Oct 01 15:29:54 crc kubenswrapper[4869]: I1001 15:29:54.070328 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b21a8f1c-ddea-4663-ba39-f2c75c93acec-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-l2t45\" (UID: \"b21a8f1c-ddea-4663-ba39-f2c75c93acec\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-l2t45" Oct 01 15:29:54 crc kubenswrapper[4869]: I1001 15:29:54.070517 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b21a8f1c-ddea-4663-ba39-f2c75c93acec-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-l2t45\" 
(UID: \"b21a8f1c-ddea-4663-ba39-f2c75c93acec\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-l2t45" Oct 01 15:29:54 crc kubenswrapper[4869]: I1001 15:29:54.076182 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b21a8f1c-ddea-4663-ba39-f2c75c93acec-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-l2t45\" (UID: \"b21a8f1c-ddea-4663-ba39-f2c75c93acec\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-l2t45" Oct 01 15:29:54 crc kubenswrapper[4869]: I1001 15:29:54.077153 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b21a8f1c-ddea-4663-ba39-f2c75c93acec-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-l2t45\" (UID: \"b21a8f1c-ddea-4663-ba39-f2c75c93acec\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-l2t45" Oct 01 15:29:54 crc kubenswrapper[4869]: I1001 15:29:54.088784 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-55qxm\" (UniqueName: \"kubernetes.io/projected/b21a8f1c-ddea-4663-ba39-f2c75c93acec-kube-api-access-55qxm\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-l2t45\" (UID: \"b21a8f1c-ddea-4663-ba39-f2c75c93acec\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-l2t45" Oct 01 15:29:54 crc kubenswrapper[4869]: I1001 15:29:54.220615 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-l2t45" Oct 01 15:29:54 crc kubenswrapper[4869]: I1001 15:29:54.741903 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-l2t45"] Oct 01 15:29:54 crc kubenswrapper[4869]: I1001 15:29:54.770577 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-l2t45" event={"ID":"b21a8f1c-ddea-4663-ba39-f2c75c93acec","Type":"ContainerStarted","Data":"2d9e9f271a685f542b68c0203a2b61e4b8601738af8049d2c380bdcc6f89c71c"} Oct 01 15:29:55 crc kubenswrapper[4869]: I1001 15:29:55.783306 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-l2t45" event={"ID":"b21a8f1c-ddea-4663-ba39-f2c75c93acec","Type":"ContainerStarted","Data":"a4c30e7649e1d7e4e4b388df040c08752693c97867f649c3a99c026e12375a3a"} Oct 01 15:29:55 crc kubenswrapper[4869]: I1001 15:29:55.807344 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-l2t45" podStartSLOduration=2.33516283 podStartE2EDuration="2.807252821s" podCreationTimestamp="2025-10-01 15:29:53 +0000 UTC" firstStartedPulling="2025-10-01 15:29:54.75127463 +0000 UTC m=+1503.898117746" lastFinishedPulling="2025-10-01 15:29:55.223364591 +0000 UTC m=+1504.370207737" observedRunningTime="2025-10-01 15:29:55.802188094 +0000 UTC m=+1504.949031220" watchObservedRunningTime="2025-10-01 15:29:55.807252821 +0000 UTC m=+1504.954095967" Oct 01 15:29:59 crc kubenswrapper[4869]: I1001 15:29:59.063740 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-3b43-account-create-fgnrl"] Oct 01 15:29:59 crc kubenswrapper[4869]: I1001 15:29:59.075720 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-6b4b-account-create-8zk2q"] Oct 01 15:29:59 crc kubenswrapper[4869]: I1001 15:29:59.086678 4869 
kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-3b43-account-create-fgnrl"] Oct 01 15:29:59 crc kubenswrapper[4869]: I1001 15:29:59.094799 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-6b4b-account-create-8zk2q"] Oct 01 15:29:59 crc kubenswrapper[4869]: I1001 15:29:59.595118 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="40fc21e1-9cc1-42cb-84ae-a5edde36cefd" path="/var/lib/kubelet/pods/40fc21e1-9cc1-42cb-84ae-a5edde36cefd/volumes" Oct 01 15:29:59 crc kubenswrapper[4869]: I1001 15:29:59.596024 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b685c639-4b3f-4597-8b0d-9f03283f18ed" path="/var/lib/kubelet/pods/b685c639-4b3f-4597-8b0d-9f03283f18ed/volumes" Oct 01 15:30:00 crc kubenswrapper[4869]: I1001 15:30:00.136971 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29322210-r2pxf"] Oct 01 15:30:00 crc kubenswrapper[4869]: I1001 15:30:00.138343 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29322210-r2pxf" Oct 01 15:30:00 crc kubenswrapper[4869]: I1001 15:30:00.143282 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 01 15:30:00 crc kubenswrapper[4869]: I1001 15:30:00.146505 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29322210-r2pxf"] Oct 01 15:30:00 crc kubenswrapper[4869]: I1001 15:30:00.158383 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 01 15:30:00 crc kubenswrapper[4869]: I1001 15:30:00.204557 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b3c7b9cf-c4b5-45bd-a4f4-6178797e2d04-config-volume\") pod \"collect-profiles-29322210-r2pxf\" (UID: \"b3c7b9cf-c4b5-45bd-a4f4-6178797e2d04\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322210-r2pxf" Oct 01 15:30:00 crc kubenswrapper[4869]: I1001 15:30:00.204629 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b3c7b9cf-c4b5-45bd-a4f4-6178797e2d04-secret-volume\") pod \"collect-profiles-29322210-r2pxf\" (UID: \"b3c7b9cf-c4b5-45bd-a4f4-6178797e2d04\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322210-r2pxf" Oct 01 15:30:00 crc kubenswrapper[4869]: I1001 15:30:00.204949 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kc9nk\" (UniqueName: \"kubernetes.io/projected/b3c7b9cf-c4b5-45bd-a4f4-6178797e2d04-kube-api-access-kc9nk\") pod \"collect-profiles-29322210-r2pxf\" (UID: \"b3c7b9cf-c4b5-45bd-a4f4-6178797e2d04\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322210-r2pxf" Oct 01 15:30:00 crc kubenswrapper[4869]: I1001 15:30:00.307547 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kc9nk\" (UniqueName: \"kubernetes.io/projected/b3c7b9cf-c4b5-45bd-a4f4-6178797e2d04-kube-api-access-kc9nk\") pod \"collect-profiles-29322210-r2pxf\" (UID: \"b3c7b9cf-c4b5-45bd-a4f4-6178797e2d04\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322210-r2pxf" Oct 01 15:30:00 
crc kubenswrapper[4869]: I1001 15:30:00.307633 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b3c7b9cf-c4b5-45bd-a4f4-6178797e2d04-config-volume\") pod \"collect-profiles-29322210-r2pxf\" (UID: \"b3c7b9cf-c4b5-45bd-a4f4-6178797e2d04\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322210-r2pxf" Oct 01 15:30:00 crc kubenswrapper[4869]: I1001 15:30:00.307680 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b3c7b9cf-c4b5-45bd-a4f4-6178797e2d04-secret-volume\") pod \"collect-profiles-29322210-r2pxf\" (UID: \"b3c7b9cf-c4b5-45bd-a4f4-6178797e2d04\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322210-r2pxf" Oct 01 15:30:00 crc kubenswrapper[4869]: I1001 15:30:00.308541 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b3c7b9cf-c4b5-45bd-a4f4-6178797e2d04-config-volume\") pod \"collect-profiles-29322210-r2pxf\" (UID: \"b3c7b9cf-c4b5-45bd-a4f4-6178797e2d04\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322210-r2pxf" Oct 01 15:30:00 crc kubenswrapper[4869]: I1001 15:30:00.313443 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b3c7b9cf-c4b5-45bd-a4f4-6178797e2d04-secret-volume\") pod \"collect-profiles-29322210-r2pxf\" (UID: \"b3c7b9cf-c4b5-45bd-a4f4-6178797e2d04\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322210-r2pxf" Oct 01 15:30:00 crc kubenswrapper[4869]: I1001 15:30:00.324941 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kc9nk\" (UniqueName: \"kubernetes.io/projected/b3c7b9cf-c4b5-45bd-a4f4-6178797e2d04-kube-api-access-kc9nk\") pod \"collect-profiles-29322210-r2pxf\" (UID: \"b3c7b9cf-c4b5-45bd-a4f4-6178797e2d04\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322210-r2pxf" Oct 01 15:30:00 crc kubenswrapper[4869]: I1001 15:30:00.471030 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29322210-r2pxf" Oct 01 15:30:01 crc kubenswrapper[4869]: I1001 15:30:01.066170 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29322210-r2pxf"] Oct 01 15:30:01 crc kubenswrapper[4869]: I1001 15:30:01.852752 4869 generic.go:334] "Generic (PLEG): container finished" podID="b3c7b9cf-c4b5-45bd-a4f4-6178797e2d04" containerID="4dd69d9fdaac305e0e109c5778080b188ff1dda5220701c8338b9458fae97169" exitCode=0 Oct 01 15:30:01 crc kubenswrapper[4869]: I1001 15:30:01.852876 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29322210-r2pxf" event={"ID":"b3c7b9cf-c4b5-45bd-a4f4-6178797e2d04","Type":"ContainerDied","Data":"4dd69d9fdaac305e0e109c5778080b188ff1dda5220701c8338b9458fae97169"} Oct 01 15:30:01 crc kubenswrapper[4869]: I1001 15:30:01.853179 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29322210-r2pxf" event={"ID":"b3c7b9cf-c4b5-45bd-a4f4-6178797e2d04","Type":"ContainerStarted","Data":"433b45d59160defb9c64ff8f31e2ea8d529ed87780437d392bdb4e3043d59b21"} Oct 01 15:30:03 crc kubenswrapper[4869]: I1001 15:30:03.245725 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29322210-r2pxf" Oct 01 15:30:03 crc kubenswrapper[4869]: I1001 15:30:03.269386 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b3c7b9cf-c4b5-45bd-a4f4-6178797e2d04-config-volume\") pod \"b3c7b9cf-c4b5-45bd-a4f4-6178797e2d04\" (UID: \"b3c7b9cf-c4b5-45bd-a4f4-6178797e2d04\") " Oct 01 15:30:03 crc kubenswrapper[4869]: I1001 15:30:03.269458 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b3c7b9cf-c4b5-45bd-a4f4-6178797e2d04-secret-volume\") pod \"b3c7b9cf-c4b5-45bd-a4f4-6178797e2d04\" (UID: \"b3c7b9cf-c4b5-45bd-a4f4-6178797e2d04\") " Oct 01 15:30:03 crc kubenswrapper[4869]: I1001 15:30:03.269619 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kc9nk\" (UniqueName: \"kubernetes.io/projected/b3c7b9cf-c4b5-45bd-a4f4-6178797e2d04-kube-api-access-kc9nk\") pod \"b3c7b9cf-c4b5-45bd-a4f4-6178797e2d04\" (UID: \"b3c7b9cf-c4b5-45bd-a4f4-6178797e2d04\") " Oct 01 15:30:03 crc kubenswrapper[4869]: I1001 15:30:03.271467 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b3c7b9cf-c4b5-45bd-a4f4-6178797e2d04-config-volume" (OuterVolumeSpecName: "config-volume") pod "b3c7b9cf-c4b5-45bd-a4f4-6178797e2d04" (UID: "b3c7b9cf-c4b5-45bd-a4f4-6178797e2d04"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:30:03 crc kubenswrapper[4869]: I1001 15:30:03.278346 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b3c7b9cf-c4b5-45bd-a4f4-6178797e2d04-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "b3c7b9cf-c4b5-45bd-a4f4-6178797e2d04" (UID: "b3c7b9cf-c4b5-45bd-a4f4-6178797e2d04"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:30:03 crc kubenswrapper[4869]: I1001 15:30:03.278513 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b3c7b9cf-c4b5-45bd-a4f4-6178797e2d04-kube-api-access-kc9nk" (OuterVolumeSpecName: "kube-api-access-kc9nk") pod "b3c7b9cf-c4b5-45bd-a4f4-6178797e2d04" (UID: "b3c7b9cf-c4b5-45bd-a4f4-6178797e2d04"). InnerVolumeSpecName "kube-api-access-kc9nk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:30:03 crc kubenswrapper[4869]: I1001 15:30:03.371840 4869 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b3c7b9cf-c4b5-45bd-a4f4-6178797e2d04-config-volume\") on node \"crc\" DevicePath \"\"" Oct 01 15:30:03 crc kubenswrapper[4869]: I1001 15:30:03.371888 4869 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b3c7b9cf-c4b5-45bd-a4f4-6178797e2d04-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 01 15:30:03 crc kubenswrapper[4869]: I1001 15:30:03.371903 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kc9nk\" (UniqueName: \"kubernetes.io/projected/b3c7b9cf-c4b5-45bd-a4f4-6178797e2d04-kube-api-access-kc9nk\") on node \"crc\" DevicePath \"\"" Oct 01 15:30:03 crc kubenswrapper[4869]: I1001 15:30:03.879198 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29322210-r2pxf" event={"ID":"b3c7b9cf-c4b5-45bd-a4f4-6178797e2d04","Type":"ContainerDied","Data":"433b45d59160defb9c64ff8f31e2ea8d529ed87780437d392bdb4e3043d59b21"} Oct 01 15:30:03 crc kubenswrapper[4869]: I1001 15:30:03.879242 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="433b45d59160defb9c64ff8f31e2ea8d529ed87780437d392bdb4e3043d59b21" Oct 01 15:30:03 crc kubenswrapper[4869]: I1001 15:30:03.879324 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29322210-r2pxf" Oct 01 15:30:05 crc kubenswrapper[4869]: I1001 15:30:05.582076 4869 scope.go:117] "RemoveContainer" containerID="1d80d75dc72da971c79fa8b67243b56fe5690fce0a3f0d0147f32ac22b29db29" Oct 01 15:30:05 crc kubenswrapper[4869]: E1001 15:30:05.582550 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 15:30:12 crc kubenswrapper[4869]: I1001 15:30:12.370667 4869 scope.go:117] "RemoveContainer" containerID="f016e01d825d31393c24de728542ad4d00ece51da7b3df220722829a51122719" Oct 01 15:30:12 crc kubenswrapper[4869]: I1001 15:30:12.411806 4869 scope.go:117] "RemoveContainer" containerID="0eda98a080d6660e765a3fc5cdd826413f49f5b145e7c422de30995e88bcac0c" Oct 01 15:30:12 crc kubenswrapper[4869]: I1001 15:30:12.497194 4869 scope.go:117] "RemoveContainer" containerID="0e2955c0044e102e705874bc0b017f85080ab523359c3e8639eb1f5b32884a8e" Oct 01 15:30:12 crc kubenswrapper[4869]: I1001 15:30:12.519008 4869 scope.go:117] "RemoveContainer" containerID="aec0744a0a58cab7b01468080828f89287315954d0b524ca5cbd2d95f9cdb650" Oct 01 15:30:12 crc kubenswrapper[4869]: I1001 15:30:12.584994 4869 scope.go:117] "RemoveContainer" containerID="7d1d70ef4225de9cd48199f03b003bed499ac89b280193aef1af9b58e2b67964" Oct 01 15:30:12 crc kubenswrapper[4869]: I1001 15:30:12.604077 4869 scope.go:117] "RemoveContainer" containerID="8cf151c7d788727aca7efd8e2402e6c98a597180d730fc0d23b4c172fa3b3047" Oct 01 15:30:12 crc kubenswrapper[4869]: I1001 15:30:12.648142 4869 scope.go:117] "RemoveContainer" 
containerID="768b2a45e689a166340d2874079e94b55e0e86c9218ffa627b0f6a909ef9531c" Oct 01 15:30:12 crc kubenswrapper[4869]: I1001 15:30:12.667344 4869 scope.go:117] "RemoveContainer" containerID="e7193361abf25bba8ae9e50ff67a61ea04304dfd7d20fb79ab37092b30828f4e" Oct 01 15:30:12 crc kubenswrapper[4869]: I1001 15:30:12.691553 4869 scope.go:117] "RemoveContainer" containerID="1726125b1c080f02b2db667cf3b002bb0c5c61a58e129ca346f4df4c00e0391a" Oct 01 15:30:12 crc kubenswrapper[4869]: I1001 15:30:12.711116 4869 scope.go:117] "RemoveContainer" containerID="3eeff271b18b415270d44d04b22de017726b98da37306fe19d1395319ff66461" Oct 01 15:30:14 crc kubenswrapper[4869]: I1001 15:30:14.042462 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-24mhh"] Oct 01 15:30:14 crc kubenswrapper[4869]: I1001 15:30:14.049134 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-25k96"] Oct 01 15:30:14 crc kubenswrapper[4869]: I1001 15:30:14.055129 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-24mhh"] Oct 01 15:30:14 crc kubenswrapper[4869]: I1001 15:30:14.064427 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-25k96"] Oct 01 15:30:15 crc kubenswrapper[4869]: I1001 15:30:15.595081 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0d6f159a-60ec-48de-87f6-d676877278c6" path="/var/lib/kubelet/pods/0d6f159a-60ec-48de-87f6-d676877278c6/volumes" Oct 01 15:30:15 crc kubenswrapper[4869]: I1001 15:30:15.596989 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="546e76f6-f453-481b-8115-369d6ff9326c" path="/var/lib/kubelet/pods/546e76f6-f453-481b-8115-369d6ff9326c/volumes" Oct 01 15:30:18 crc kubenswrapper[4869]: I1001 15:30:18.581401 4869 scope.go:117] "RemoveContainer" containerID="1d80d75dc72da971c79fa8b67243b56fe5690fce0a3f0d0147f32ac22b29db29" Oct 01 15:30:18 crc kubenswrapper[4869]: E1001 15:30:18.582242 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 15:30:20 crc kubenswrapper[4869]: I1001 15:30:20.026329 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-jmffx"] Oct 01 15:30:20 crc kubenswrapper[4869]: I1001 15:30:20.038070 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-jmffx"] Oct 01 15:30:21 crc kubenswrapper[4869]: I1001 15:30:21.590796 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3dfd59b2-2698-41f5-95b7-f3c765173302" path="/var/lib/kubelet/pods/3dfd59b2-2698-41f5-95b7-f3c765173302/volumes" Oct 01 15:30:30 crc kubenswrapper[4869]: I1001 15:30:30.582590 4869 scope.go:117] "RemoveContainer" containerID="1d80d75dc72da971c79fa8b67243b56fe5690fce0a3f0d0147f32ac22b29db29" Oct 01 15:30:30 crc kubenswrapper[4869]: E1001 15:30:30.586061 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 15:30:31 crc kubenswrapper[4869]: I1001 15:30:31.033035 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-slhz2"] Oct 01 15:30:31 crc kubenswrapper[4869]: I1001 15:30:31.039534 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-slhz2"] Oct 01 15:30:31 crc kubenswrapper[4869]: I1001 15:30:31.599366 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f9c5d763-90ad-4611-8cac-193343af1b78" path="/var/lib/kubelet/pods/f9c5d763-90ad-4611-8cac-193343af1b78/volumes" Oct 01 15:30:37 crc kubenswrapper[4869]: I1001 15:30:37.026075 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-9fzlp"] Oct 01 15:30:37 crc kubenswrapper[4869]: I1001 15:30:37.034868 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-9fzlp"] Oct 01 15:30:37 crc kubenswrapper[4869]: I1001 15:30:37.593647 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="560e7cea-ec57-4f2c-b0d3-9ee5ded6f37a" path="/var/lib/kubelet/pods/560e7cea-ec57-4f2c-b0d3-9ee5ded6f37a/volumes" Oct 01 15:30:39 crc kubenswrapper[4869]: I1001 15:30:39.313865 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-t6j8h"] Oct 01 15:30:39 crc kubenswrapper[4869]: E1001 15:30:39.314767 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3c7b9cf-c4b5-45bd-a4f4-6178797e2d04" containerName="collect-profiles" Oct 01 15:30:39 crc kubenswrapper[4869]: I1001 15:30:39.314788 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3c7b9cf-c4b5-45bd-a4f4-6178797e2d04" containerName="collect-profiles" Oct 01 15:30:39 crc kubenswrapper[4869]: I1001 15:30:39.315143 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="b3c7b9cf-c4b5-45bd-a4f4-6178797e2d04" containerName="collect-profiles" Oct 01 15:30:39 crc kubenswrapper[4869]: I1001 15:30:39.317303 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-t6j8h" Oct 01 15:30:39 crc kubenswrapper[4869]: I1001 15:30:39.328612 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-t6j8h"] Oct 01 15:30:39 crc kubenswrapper[4869]: I1001 15:30:39.383613 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/37191f96-a543-4969-b93f-a7a73da13007-catalog-content\") pod \"redhat-operators-t6j8h\" (UID: \"37191f96-a543-4969-b93f-a7a73da13007\") " pod="openshift-marketplace/redhat-operators-t6j8h" Oct 01 15:30:39 crc kubenswrapper[4869]: I1001 15:30:39.383654 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/37191f96-a543-4969-b93f-a7a73da13007-utilities\") pod \"redhat-operators-t6j8h\" (UID: \"37191f96-a543-4969-b93f-a7a73da13007\") " pod="openshift-marketplace/redhat-operators-t6j8h" Oct 01 15:30:39 crc kubenswrapper[4869]: I1001 15:30:39.383688 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z6fml\" (UniqueName: \"kubernetes.io/projected/37191f96-a543-4969-b93f-a7a73da13007-kube-api-access-z6fml\") pod \"redhat-operators-t6j8h\" (UID: \"37191f96-a543-4969-b93f-a7a73da13007\") " pod="openshift-marketplace/redhat-operators-t6j8h" Oct 01 15:30:39 crc kubenswrapper[4869]: I1001 15:30:39.485654 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/37191f96-a543-4969-b93f-a7a73da13007-catalog-content\") pod \"redhat-operators-t6j8h\" (UID: \"37191f96-a543-4969-b93f-a7a73da13007\") " pod="openshift-marketplace/redhat-operators-t6j8h" Oct 01 15:30:39 crc kubenswrapper[4869]: I1001 15:30:39.485713 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/37191f96-a543-4969-b93f-a7a73da13007-utilities\") pod \"redhat-operators-t6j8h\" (UID: \"37191f96-a543-4969-b93f-a7a73da13007\") " pod="openshift-marketplace/redhat-operators-t6j8h" Oct 01 15:30:39 crc kubenswrapper[4869]: I1001 15:30:39.485759 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z6fml\" (UniqueName: \"kubernetes.io/projected/37191f96-a543-4969-b93f-a7a73da13007-kube-api-access-z6fml\") pod \"redhat-operators-t6j8h\" (UID: \"37191f96-a543-4969-b93f-a7a73da13007\") " pod="openshift-marketplace/redhat-operators-t6j8h" Oct 01 15:30:39 crc kubenswrapper[4869]: I1001 15:30:39.486279 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/37191f96-a543-4969-b93f-a7a73da13007-catalog-content\") pod \"redhat-operators-t6j8h\" (UID: \"37191f96-a543-4969-b93f-a7a73da13007\") " pod="openshift-marketplace/redhat-operators-t6j8h" Oct 01 15:30:39 crc kubenswrapper[4869]: I1001 15:30:39.486359 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/37191f96-a543-4969-b93f-a7a73da13007-utilities\") pod \"redhat-operators-t6j8h\" (UID: \"37191f96-a543-4969-b93f-a7a73da13007\") " pod="openshift-marketplace/redhat-operators-t6j8h" Oct 01 15:30:39 crc kubenswrapper[4869]: I1001 15:30:39.524017 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-z6fml\" (UniqueName: \"kubernetes.io/projected/37191f96-a543-4969-b93f-a7a73da13007-kube-api-access-z6fml\") pod \"redhat-operators-t6j8h\" (UID: \"37191f96-a543-4969-b93f-a7a73da13007\") " pod="openshift-marketplace/redhat-operators-t6j8h" Oct 01 15:30:39 crc kubenswrapper[4869]: I1001 15:30:39.652147 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-t6j8h" Oct 01 15:30:40 crc kubenswrapper[4869]: I1001 15:30:40.103209 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-t6j8h"] Oct 01 15:30:40 crc kubenswrapper[4869]: I1001 15:30:40.220326 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-t6j8h" event={"ID":"37191f96-a543-4969-b93f-a7a73da13007","Type":"ContainerStarted","Data":"b87665a9e5b83f409a0c9f6363ae1b7a9664555f105ef73039e178ee4109d4ab"} Oct 01 15:30:41 crc kubenswrapper[4869]: I1001 15:30:41.230827 4869 generic.go:334] "Generic (PLEG): container finished" podID="37191f96-a543-4969-b93f-a7a73da13007" containerID="0f69edbd2898d1f7c40c5289e4f9333bddfcff81c97206eda8b564a193a96894" exitCode=0 Oct 01 15:30:41 crc kubenswrapper[4869]: I1001 15:30:41.230886 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-t6j8h" event={"ID":"37191f96-a543-4969-b93f-a7a73da13007","Type":"ContainerDied","Data":"0f69edbd2898d1f7c40c5289e4f9333bddfcff81c97206eda8b564a193a96894"} Oct 01 15:30:42 crc kubenswrapper[4869]: I1001 15:30:42.242579 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-t6j8h" event={"ID":"37191f96-a543-4969-b93f-a7a73da13007","Type":"ContainerStarted","Data":"f2b8c006db6a1e9bd00a5145e481bca7a1a7e2d50660c06ed04e517a72d99966"} Oct 01 15:30:43 crc kubenswrapper[4869]: I1001 15:30:43.256132 4869 generic.go:334] "Generic (PLEG): container finished" podID="37191f96-a543-4969-b93f-a7a73da13007" containerID="f2b8c006db6a1e9bd00a5145e481bca7a1a7e2d50660c06ed04e517a72d99966" exitCode=0 Oct 01 15:30:43 crc kubenswrapper[4869]: I1001 15:30:43.256192 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-t6j8h" event={"ID":"37191f96-a543-4969-b93f-a7a73da13007","Type":"ContainerDied","Data":"f2b8c006db6a1e9bd00a5145e481bca7a1a7e2d50660c06ed04e517a72d99966"} Oct 01 15:30:44 crc kubenswrapper[4869]: I1001 15:30:44.266789 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-t6j8h" event={"ID":"37191f96-a543-4969-b93f-a7a73da13007","Type":"ContainerStarted","Data":"a7cab69c6266e56b22da81e85b783e2c2cad876477d53809c67c49a040ea5b93"} Oct 01 15:30:44 crc kubenswrapper[4869]: I1001 15:30:44.283486 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-t6j8h" podStartSLOduration=2.762775888 podStartE2EDuration="5.283468889s" podCreationTimestamp="2025-10-01 15:30:39 +0000 UTC" firstStartedPulling="2025-10-01 15:30:41.233321161 +0000 UTC m=+1550.380164287" lastFinishedPulling="2025-10-01 15:30:43.754014172 +0000 UTC m=+1552.900857288" observedRunningTime="2025-10-01 15:30:44.28269645 +0000 UTC m=+1553.429539576" watchObservedRunningTime="2025-10-01 15:30:44.283468889 +0000 UTC m=+1553.430312005" Oct 01 15:30:45 crc kubenswrapper[4869]: I1001 15:30:45.581054 4869 scope.go:117] "RemoveContainer" containerID="1d80d75dc72da971c79fa8b67243b56fe5690fce0a3f0d0147f32ac22b29db29" Oct 01 
15:30:45 crc kubenswrapper[4869]: E1001 15:30:45.581393 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 15:30:49 crc kubenswrapper[4869]: I1001 15:30:49.652885 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-t6j8h" Oct 01 15:30:49 crc kubenswrapper[4869]: I1001 15:30:49.653680 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-t6j8h" Oct 01 15:30:49 crc kubenswrapper[4869]: I1001 15:30:49.742612 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-t6j8h" Oct 01 15:30:50 crc kubenswrapper[4869]: I1001 15:30:50.408657 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-t6j8h" Oct 01 15:30:50 crc kubenswrapper[4869]: I1001 15:30:50.476588 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-t6j8h"] Oct 01 15:30:52 crc kubenswrapper[4869]: I1001 15:30:52.348568 4869 generic.go:334] "Generic (PLEG): container finished" podID="b21a8f1c-ddea-4663-ba39-f2c75c93acec" containerID="a4c30e7649e1d7e4e4b388df040c08752693c97867f649c3a99c026e12375a3a" exitCode=2 Oct 01 15:30:52 crc kubenswrapper[4869]: I1001 15:30:52.348665 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-l2t45" event={"ID":"b21a8f1c-ddea-4663-ba39-f2c75c93acec","Type":"ContainerDied","Data":"a4c30e7649e1d7e4e4b388df040c08752693c97867f649c3a99c026e12375a3a"} Oct 01 15:30:52 crc kubenswrapper[4869]: I1001 15:30:52.349317 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-t6j8h" podUID="37191f96-a543-4969-b93f-a7a73da13007" containerName="registry-server" containerID="cri-o://a7cab69c6266e56b22da81e85b783e2c2cad876477d53809c67c49a040ea5b93" gracePeriod=2 Oct 01 15:30:52 crc kubenswrapper[4869]: E1001 15:30:52.416431 4869 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod37191f96_a543_4969_b93f_a7a73da13007.slice/crio-a7cab69c6266e56b22da81e85b783e2c2cad876477d53809c67c49a040ea5b93.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod37191f96_a543_4969_b93f_a7a73da13007.slice/crio-conmon-a7cab69c6266e56b22da81e85b783e2c2cad876477d53809c67c49a040ea5b93.scope\": RecentStats: unable to find data in memory cache]" Oct 01 15:30:52 crc kubenswrapper[4869]: I1001 15:30:52.762718 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-t6j8h" Oct 01 15:30:52 crc kubenswrapper[4869]: I1001 15:30:52.942025 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z6fml\" (UniqueName: \"kubernetes.io/projected/37191f96-a543-4969-b93f-a7a73da13007-kube-api-access-z6fml\") pod \"37191f96-a543-4969-b93f-a7a73da13007\" (UID: \"37191f96-a543-4969-b93f-a7a73da13007\") " Oct 01 15:30:52 crc kubenswrapper[4869]: I1001 15:30:52.942183 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/37191f96-a543-4969-b93f-a7a73da13007-utilities\") pod \"37191f96-a543-4969-b93f-a7a73da13007\" (UID: \"37191f96-a543-4969-b93f-a7a73da13007\") " Oct 01 15:30:52 crc kubenswrapper[4869]: I1001 15:30:52.942465 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/37191f96-a543-4969-b93f-a7a73da13007-catalog-content\") pod \"37191f96-a543-4969-b93f-a7a73da13007\" (UID: \"37191f96-a543-4969-b93f-a7a73da13007\") " Oct 01 15:30:52 crc kubenswrapper[4869]: I1001 15:30:52.943842 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/37191f96-a543-4969-b93f-a7a73da13007-utilities" (OuterVolumeSpecName: "utilities") pod "37191f96-a543-4969-b93f-a7a73da13007" (UID: "37191f96-a543-4969-b93f-a7a73da13007"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 15:30:52 crc kubenswrapper[4869]: I1001 15:30:52.944158 4869 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/37191f96-a543-4969-b93f-a7a73da13007-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 15:30:52 crc kubenswrapper[4869]: I1001 15:30:52.952719 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/37191f96-a543-4969-b93f-a7a73da13007-kube-api-access-z6fml" (OuterVolumeSpecName: "kube-api-access-z6fml") pod "37191f96-a543-4969-b93f-a7a73da13007" (UID: "37191f96-a543-4969-b93f-a7a73da13007"). InnerVolumeSpecName "kube-api-access-z6fml". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:30:53 crc kubenswrapper[4869]: I1001 15:30:53.018178 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/37191f96-a543-4969-b93f-a7a73da13007-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "37191f96-a543-4969-b93f-a7a73da13007" (UID: "37191f96-a543-4969-b93f-a7a73da13007"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 15:30:53 crc kubenswrapper[4869]: I1001 15:30:53.044731 4869 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/37191f96-a543-4969-b93f-a7a73da13007-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 15:30:53 crc kubenswrapper[4869]: I1001 15:30:53.044781 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z6fml\" (UniqueName: \"kubernetes.io/projected/37191f96-a543-4969-b93f-a7a73da13007-kube-api-access-z6fml\") on node \"crc\" DevicePath \"\"" Oct 01 15:30:53 crc kubenswrapper[4869]: I1001 15:30:53.360567 4869 generic.go:334] "Generic (PLEG): container finished" podID="37191f96-a543-4969-b93f-a7a73da13007" containerID="a7cab69c6266e56b22da81e85b783e2c2cad876477d53809c67c49a040ea5b93" exitCode=0 Oct 01 15:30:53 crc kubenswrapper[4869]: I1001 15:30:53.360634 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-t6j8h" Oct 01 15:30:53 crc kubenswrapper[4869]: I1001 15:30:53.360644 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-t6j8h" event={"ID":"37191f96-a543-4969-b93f-a7a73da13007","Type":"ContainerDied","Data":"a7cab69c6266e56b22da81e85b783e2c2cad876477d53809c67c49a040ea5b93"} Oct 01 15:30:53 crc kubenswrapper[4869]: I1001 15:30:53.360709 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-t6j8h" event={"ID":"37191f96-a543-4969-b93f-a7a73da13007","Type":"ContainerDied","Data":"b87665a9e5b83f409a0c9f6363ae1b7a9664555f105ef73039e178ee4109d4ab"} Oct 01 15:30:53 crc kubenswrapper[4869]: I1001 15:30:53.360736 4869 scope.go:117] "RemoveContainer" containerID="a7cab69c6266e56b22da81e85b783e2c2cad876477d53809c67c49a040ea5b93" Oct 01 15:30:53 crc kubenswrapper[4869]: I1001 15:30:53.385764 4869 scope.go:117] "RemoveContainer" containerID="f2b8c006db6a1e9bd00a5145e481bca7a1a7e2d50660c06ed04e517a72d99966" Oct 01 15:30:53 crc kubenswrapper[4869]: I1001 15:30:53.413866 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-t6j8h"] Oct 01 15:30:53 crc kubenswrapper[4869]: I1001 15:30:53.423001 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-t6j8h"] Oct 01 15:30:53 crc kubenswrapper[4869]: I1001 15:30:53.437403 4869 scope.go:117] "RemoveContainer" containerID="0f69edbd2898d1f7c40c5289e4f9333bddfcff81c97206eda8b564a193a96894" Oct 01 15:30:53 crc kubenswrapper[4869]: I1001 15:30:53.481971 4869 scope.go:117] "RemoveContainer" containerID="a7cab69c6266e56b22da81e85b783e2c2cad876477d53809c67c49a040ea5b93" Oct 01 15:30:53 crc kubenswrapper[4869]: E1001 15:30:53.482556 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a7cab69c6266e56b22da81e85b783e2c2cad876477d53809c67c49a040ea5b93\": container with ID starting with a7cab69c6266e56b22da81e85b783e2c2cad876477d53809c67c49a040ea5b93 not found: ID does not exist" containerID="a7cab69c6266e56b22da81e85b783e2c2cad876477d53809c67c49a040ea5b93" Oct 01 15:30:53 crc kubenswrapper[4869]: I1001 15:30:53.482583 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a7cab69c6266e56b22da81e85b783e2c2cad876477d53809c67c49a040ea5b93"} err="failed to get container status \"a7cab69c6266e56b22da81e85b783e2c2cad876477d53809c67c49a040ea5b93\": 
rpc error: code = NotFound desc = could not find container \"a7cab69c6266e56b22da81e85b783e2c2cad876477d53809c67c49a040ea5b93\": container with ID starting with a7cab69c6266e56b22da81e85b783e2c2cad876477d53809c67c49a040ea5b93 not found: ID does not exist" Oct 01 15:30:53 crc kubenswrapper[4869]: I1001 15:30:53.482605 4869 scope.go:117] "RemoveContainer" containerID="f2b8c006db6a1e9bd00a5145e481bca7a1a7e2d50660c06ed04e517a72d99966" Oct 01 15:30:53 crc kubenswrapper[4869]: E1001 15:30:53.482946 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f2b8c006db6a1e9bd00a5145e481bca7a1a7e2d50660c06ed04e517a72d99966\": container with ID starting with f2b8c006db6a1e9bd00a5145e481bca7a1a7e2d50660c06ed04e517a72d99966 not found: ID does not exist" containerID="f2b8c006db6a1e9bd00a5145e481bca7a1a7e2d50660c06ed04e517a72d99966" Oct 01 15:30:53 crc kubenswrapper[4869]: I1001 15:30:53.482986 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f2b8c006db6a1e9bd00a5145e481bca7a1a7e2d50660c06ed04e517a72d99966"} err="failed to get container status \"f2b8c006db6a1e9bd00a5145e481bca7a1a7e2d50660c06ed04e517a72d99966\": rpc error: code = NotFound desc = could not find container \"f2b8c006db6a1e9bd00a5145e481bca7a1a7e2d50660c06ed04e517a72d99966\": container with ID starting with f2b8c006db6a1e9bd00a5145e481bca7a1a7e2d50660c06ed04e517a72d99966 not found: ID does not exist" Oct 01 15:30:53 crc kubenswrapper[4869]: I1001 15:30:53.483018 4869 scope.go:117] "RemoveContainer" containerID="0f69edbd2898d1f7c40c5289e4f9333bddfcff81c97206eda8b564a193a96894" Oct 01 15:30:53 crc kubenswrapper[4869]: E1001 15:30:53.483544 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0f69edbd2898d1f7c40c5289e4f9333bddfcff81c97206eda8b564a193a96894\": container with ID starting with 0f69edbd2898d1f7c40c5289e4f9333bddfcff81c97206eda8b564a193a96894 not found: ID does not exist" containerID="0f69edbd2898d1f7c40c5289e4f9333bddfcff81c97206eda8b564a193a96894" Oct 01 15:30:53 crc kubenswrapper[4869]: I1001 15:30:53.483567 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0f69edbd2898d1f7c40c5289e4f9333bddfcff81c97206eda8b564a193a96894"} err="failed to get container status \"0f69edbd2898d1f7c40c5289e4f9333bddfcff81c97206eda8b564a193a96894\": rpc error: code = NotFound desc = could not find container \"0f69edbd2898d1f7c40c5289e4f9333bddfcff81c97206eda8b564a193a96894\": container with ID starting with 0f69edbd2898d1f7c40c5289e4f9333bddfcff81c97206eda8b564a193a96894 not found: ID does not exist" Oct 01 15:30:53 crc kubenswrapper[4869]: I1001 15:30:53.593845 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="37191f96-a543-4969-b93f-a7a73da13007" path="/var/lib/kubelet/pods/37191f96-a543-4969-b93f-a7a73da13007/volumes" Oct 01 15:30:53 crc kubenswrapper[4869]: I1001 15:30:53.780310 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-l2t45" Oct 01 15:30:53 crc kubenswrapper[4869]: I1001 15:30:53.959780 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b21a8f1c-ddea-4663-ba39-f2c75c93acec-ssh-key\") pod \"b21a8f1c-ddea-4663-ba39-f2c75c93acec\" (UID: \"b21a8f1c-ddea-4663-ba39-f2c75c93acec\") " Oct 01 15:30:53 crc kubenswrapper[4869]: I1001 15:30:53.959820 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-55qxm\" (UniqueName: \"kubernetes.io/projected/b21a8f1c-ddea-4663-ba39-f2c75c93acec-kube-api-access-55qxm\") pod \"b21a8f1c-ddea-4663-ba39-f2c75c93acec\" (UID: \"b21a8f1c-ddea-4663-ba39-f2c75c93acec\") " Oct 01 15:30:53 crc kubenswrapper[4869]: I1001 15:30:53.959907 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b21a8f1c-ddea-4663-ba39-f2c75c93acec-inventory\") pod \"b21a8f1c-ddea-4663-ba39-f2c75c93acec\" (UID: \"b21a8f1c-ddea-4663-ba39-f2c75c93acec\") " Oct 01 15:30:53 crc kubenswrapper[4869]: I1001 15:30:53.973545 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b21a8f1c-ddea-4663-ba39-f2c75c93acec-kube-api-access-55qxm" (OuterVolumeSpecName: "kube-api-access-55qxm") pod "b21a8f1c-ddea-4663-ba39-f2c75c93acec" (UID: "b21a8f1c-ddea-4663-ba39-f2c75c93acec"). InnerVolumeSpecName "kube-api-access-55qxm". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:30:54 crc kubenswrapper[4869]: I1001 15:30:54.000460 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b21a8f1c-ddea-4663-ba39-f2c75c93acec-inventory" (OuterVolumeSpecName: "inventory") pod "b21a8f1c-ddea-4663-ba39-f2c75c93acec" (UID: "b21a8f1c-ddea-4663-ba39-f2c75c93acec"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:30:54 crc kubenswrapper[4869]: I1001 15:30:54.008158 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b21a8f1c-ddea-4663-ba39-f2c75c93acec-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "b21a8f1c-ddea-4663-ba39-f2c75c93acec" (UID: "b21a8f1c-ddea-4663-ba39-f2c75c93acec"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:30:54 crc kubenswrapper[4869]: I1001 15:30:54.062005 4869 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b21a8f1c-ddea-4663-ba39-f2c75c93acec-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 01 15:30:54 crc kubenswrapper[4869]: I1001 15:30:54.062042 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-55qxm\" (UniqueName: \"kubernetes.io/projected/b21a8f1c-ddea-4663-ba39-f2c75c93acec-kube-api-access-55qxm\") on node \"crc\" DevicePath \"\"" Oct 01 15:30:54 crc kubenswrapper[4869]: I1001 15:30:54.062058 4869 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b21a8f1c-ddea-4663-ba39-f2c75c93acec-inventory\") on node \"crc\" DevicePath \"\"" Oct 01 15:30:54 crc kubenswrapper[4869]: I1001 15:30:54.375643 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-l2t45" event={"ID":"b21a8f1c-ddea-4663-ba39-f2c75c93acec","Type":"ContainerDied","Data":"2d9e9f271a685f542b68c0203a2b61e4b8601738af8049d2c380bdcc6f89c71c"} Oct 01 15:30:54 crc kubenswrapper[4869]: I1001 15:30:54.375716 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2d9e9f271a685f542b68c0203a2b61e4b8601738af8049d2c380bdcc6f89c71c" Oct 01 15:30:54 crc kubenswrapper[4869]: I1001 15:30:54.375817 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-l2t45" Oct 01 15:30:58 crc kubenswrapper[4869]: I1001 15:30:58.581225 4869 scope.go:117] "RemoveContainer" containerID="1d80d75dc72da971c79fa8b67243b56fe5690fce0a3f0d0147f32ac22b29db29" Oct 01 15:30:58 crc kubenswrapper[4869]: E1001 15:30:58.581942 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 15:31:02 crc kubenswrapper[4869]: I1001 15:31:02.030100 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xjgrr"] Oct 01 15:31:02 crc kubenswrapper[4869]: E1001 15:31:02.030941 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b21a8f1c-ddea-4663-ba39-f2c75c93acec" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Oct 01 15:31:02 crc kubenswrapper[4869]: I1001 15:31:02.030960 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="b21a8f1c-ddea-4663-ba39-f2c75c93acec" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Oct 01 15:31:02 crc kubenswrapper[4869]: E1001 15:31:02.030998 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="37191f96-a543-4969-b93f-a7a73da13007" containerName="registry-server" Oct 01 15:31:02 crc kubenswrapper[4869]: I1001 15:31:02.031006 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="37191f96-a543-4969-b93f-a7a73da13007" containerName="registry-server" Oct 01 15:31:02 crc kubenswrapper[4869]: E1001 15:31:02.031026 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="37191f96-a543-4969-b93f-a7a73da13007" containerName="extract-utilities" Oct 01 15:31:02 
crc kubenswrapper[4869]: I1001 15:31:02.031036 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="37191f96-a543-4969-b93f-a7a73da13007" containerName="extract-utilities" Oct 01 15:31:02 crc kubenswrapper[4869]: E1001 15:31:02.031047 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="37191f96-a543-4969-b93f-a7a73da13007" containerName="extract-content" Oct 01 15:31:02 crc kubenswrapper[4869]: I1001 15:31:02.031054 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="37191f96-a543-4969-b93f-a7a73da13007" containerName="extract-content" Oct 01 15:31:02 crc kubenswrapper[4869]: I1001 15:31:02.031273 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="37191f96-a543-4969-b93f-a7a73da13007" containerName="registry-server" Oct 01 15:31:02 crc kubenswrapper[4869]: I1001 15:31:02.031296 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="b21a8f1c-ddea-4663-ba39-f2c75c93acec" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Oct 01 15:31:02 crc kubenswrapper[4869]: I1001 15:31:02.032039 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xjgrr" Oct 01 15:31:02 crc kubenswrapper[4869]: I1001 15:31:02.033939 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 01 15:31:02 crc kubenswrapper[4869]: I1001 15:31:02.034509 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 01 15:31:02 crc kubenswrapper[4869]: I1001 15:31:02.034680 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 01 15:31:02 crc kubenswrapper[4869]: I1001 15:31:02.035379 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-cjg8g" Oct 01 15:31:02 crc kubenswrapper[4869]: I1001 15:31:02.038836 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xjgrr"] Oct 01 15:31:02 crc kubenswrapper[4869]: I1001 15:31:02.222641 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e9c07398-7b35-43e1-a67a-1678b23ee63d-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-xjgrr\" (UID: \"e9c07398-7b35-43e1-a67a-1678b23ee63d\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xjgrr" Oct 01 15:31:02 crc kubenswrapper[4869]: I1001 15:31:02.222688 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e9c07398-7b35-43e1-a67a-1678b23ee63d-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-xjgrr\" (UID: \"e9c07398-7b35-43e1-a67a-1678b23ee63d\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xjgrr" Oct 01 15:31:02 crc kubenswrapper[4869]: I1001 15:31:02.222722 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f689b\" (UniqueName: \"kubernetes.io/projected/e9c07398-7b35-43e1-a67a-1678b23ee63d-kube-api-access-f689b\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-xjgrr\" (UID: \"e9c07398-7b35-43e1-a67a-1678b23ee63d\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xjgrr" Oct 01 15:31:02 crc kubenswrapper[4869]: I1001 
15:31:02.324598 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e9c07398-7b35-43e1-a67a-1678b23ee63d-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-xjgrr\" (UID: \"e9c07398-7b35-43e1-a67a-1678b23ee63d\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xjgrr" Oct 01 15:31:02 crc kubenswrapper[4869]: I1001 15:31:02.324665 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e9c07398-7b35-43e1-a67a-1678b23ee63d-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-xjgrr\" (UID: \"e9c07398-7b35-43e1-a67a-1678b23ee63d\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xjgrr" Oct 01 15:31:02 crc kubenswrapper[4869]: I1001 15:31:02.324718 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f689b\" (UniqueName: \"kubernetes.io/projected/e9c07398-7b35-43e1-a67a-1678b23ee63d-kube-api-access-f689b\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-xjgrr\" (UID: \"e9c07398-7b35-43e1-a67a-1678b23ee63d\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xjgrr" Oct 01 15:31:02 crc kubenswrapper[4869]: I1001 15:31:02.334246 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e9c07398-7b35-43e1-a67a-1678b23ee63d-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-xjgrr\" (UID: \"e9c07398-7b35-43e1-a67a-1678b23ee63d\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xjgrr" Oct 01 15:31:02 crc kubenswrapper[4869]: I1001 15:31:02.339155 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e9c07398-7b35-43e1-a67a-1678b23ee63d-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-xjgrr\" (UID: \"e9c07398-7b35-43e1-a67a-1678b23ee63d\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xjgrr" Oct 01 15:31:02 crc kubenswrapper[4869]: I1001 15:31:02.376140 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f689b\" (UniqueName: \"kubernetes.io/projected/e9c07398-7b35-43e1-a67a-1678b23ee63d-kube-api-access-f689b\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-xjgrr\" (UID: \"e9c07398-7b35-43e1-a67a-1678b23ee63d\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xjgrr" Oct 01 15:31:02 crc kubenswrapper[4869]: I1001 15:31:02.651699 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xjgrr" Oct 01 15:31:03 crc kubenswrapper[4869]: I1001 15:31:03.044376 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-tbsmb"] Oct 01 15:31:03 crc kubenswrapper[4869]: I1001 15:31:03.061107 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-gtw2h"] Oct 01 15:31:03 crc kubenswrapper[4869]: I1001 15:31:03.073154 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-k5cms"] Oct 01 15:31:03 crc kubenswrapper[4869]: I1001 15:31:03.085353 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-tbsmb"] Oct 01 15:31:03 crc kubenswrapper[4869]: I1001 15:31:03.094389 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-k5cms"] Oct 01 15:31:03 crc kubenswrapper[4869]: I1001 15:31:03.104912 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-gtw2h"] Oct 01 15:31:03 crc kubenswrapper[4869]: I1001 15:31:03.148936 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xjgrr"] Oct 01 15:31:03 crc kubenswrapper[4869]: I1001 15:31:03.471141 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xjgrr" event={"ID":"e9c07398-7b35-43e1-a67a-1678b23ee63d","Type":"ContainerStarted","Data":"3dd5d7cb31997519505c04cf16a936234875c8457afb25ab8da3af083dcfd905"} Oct 01 15:31:03 crc kubenswrapper[4869]: I1001 15:31:03.592026 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="88a0cb67-d211-4d9f-b794-c6d3f2a552b7" path="/var/lib/kubelet/pods/88a0cb67-d211-4d9f-b794-c6d3f2a552b7/volumes" Oct 01 15:31:03 crc kubenswrapper[4869]: I1001 15:31:03.593214 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f10e4cd8-ebba-4aec-a029-0fd2536e170d" path="/var/lib/kubelet/pods/f10e4cd8-ebba-4aec-a029-0fd2536e170d/volumes" Oct 01 15:31:03 crc kubenswrapper[4869]: I1001 15:31:03.594177 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f7f7278d-fa72-44d6-bf31-2a3ca3d3e417" path="/var/lib/kubelet/pods/f7f7278d-fa72-44d6-bf31-2a3ca3d3e417/volumes" Oct 01 15:31:04 crc kubenswrapper[4869]: I1001 15:31:04.486597 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xjgrr" event={"ID":"e9c07398-7b35-43e1-a67a-1678b23ee63d","Type":"ContainerStarted","Data":"cb1689f9cd035cfe3c04150cfdc86bfa5eae7952cfd102d67886467a32dcd6ae"} Oct 01 15:31:04 crc kubenswrapper[4869]: I1001 15:31:04.507207 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xjgrr" podStartSLOduration=1.920789365 podStartE2EDuration="2.507193303s" podCreationTimestamp="2025-10-01 15:31:02 +0000 UTC" firstStartedPulling="2025-10-01 15:31:03.154392088 +0000 UTC m=+1572.301235214" lastFinishedPulling="2025-10-01 15:31:03.740796036 +0000 UTC m=+1572.887639152" observedRunningTime="2025-10-01 15:31:04.505514541 +0000 UTC m=+1573.652357677" watchObservedRunningTime="2025-10-01 15:31:04.507193303 +0000 UTC m=+1573.654036419" Oct 01 15:31:10 crc kubenswrapper[4869]: I1001 15:31:10.581165 4869 scope.go:117] "RemoveContainer" containerID="1d80d75dc72da971c79fa8b67243b56fe5690fce0a3f0d0147f32ac22b29db29" Oct 01 15:31:10 crc 
kubenswrapper[4869]: E1001 15:31:10.581699 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 15:31:12 crc kubenswrapper[4869]: I1001 15:31:12.046973 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-20ce-account-create-tgz6c"] Oct 01 15:31:12 crc kubenswrapper[4869]: I1001 15:31:12.057250 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-44c5-account-create-8zw48"] Oct 01 15:31:12 crc kubenswrapper[4869]: I1001 15:31:12.064434 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-20ce-account-create-tgz6c"] Oct 01 15:31:12 crc kubenswrapper[4869]: I1001 15:31:12.072595 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-44c5-account-create-8zw48"] Oct 01 15:31:12 crc kubenswrapper[4869]: I1001 15:31:12.873102 4869 scope.go:117] "RemoveContainer" containerID="7d6df449e6cafe4a57fc8ee53ec67420f8668a6c47309ba282568772b8dfffda" Oct 01 15:31:12 crc kubenswrapper[4869]: I1001 15:31:12.911356 4869 scope.go:117] "RemoveContainer" containerID="346ca77027203d6eadf53d5f1a1d64a58f3c7f0e500aa8c8d4d64c71e76ee52b" Oct 01 15:31:12 crc kubenswrapper[4869]: I1001 15:31:12.970700 4869 scope.go:117] "RemoveContainer" containerID="a0388f97bcf8bcce90875c04ef6844d95dab9ed796f85800020fc72658f7804b" Oct 01 15:31:13 crc kubenswrapper[4869]: I1001 15:31:13.009850 4869 scope.go:117] "RemoveContainer" containerID="5f3d74c9f688f4adb962a4cb2f7313ad3a9a7845f406092acd4f11e626a4d230" Oct 01 15:31:13 crc kubenswrapper[4869]: I1001 15:31:13.034939 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-3a05-account-create-q2kxl"] Oct 01 15:31:13 crc kubenswrapper[4869]: I1001 15:31:13.049548 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-3a05-account-create-q2kxl"] Oct 01 15:31:13 crc kubenswrapper[4869]: I1001 15:31:13.061894 4869 scope.go:117] "RemoveContainer" containerID="d76f347a36de5d899965d01a6e6e6851b17ed2a3beb4214cb1d38c4588d3f693" Oct 01 15:31:13 crc kubenswrapper[4869]: I1001 15:31:13.081728 4869 scope.go:117] "RemoveContainer" containerID="c00c29af0c199f885bd34258cce7079f4ee9948ec7ee596002e7641554320b97" Oct 01 15:31:13 crc kubenswrapper[4869]: I1001 15:31:13.125616 4869 scope.go:117] "RemoveContainer" containerID="ca3dc69a1e196b7aba3d96fd90ce4056225902db6371625177623336a532d0c2" Oct 01 15:31:13 crc kubenswrapper[4869]: I1001 15:31:13.153468 4869 scope.go:117] "RemoveContainer" containerID="9886ef7bb5e4513d1bc4817813e0abdd353b827af210635d1572fa784cb072ad" Oct 01 15:31:13 crc kubenswrapper[4869]: I1001 15:31:13.594826 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0ce74364-60ae-42f0-9151-265fe3a38e1a" path="/var/lib/kubelet/pods/0ce74364-60ae-42f0-9151-265fe3a38e1a/volumes" Oct 01 15:31:13 crc kubenswrapper[4869]: I1001 15:31:13.596302 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="709b2b83-2eba-4778-beda-8e312ef3a6d9" path="/var/lib/kubelet/pods/709b2b83-2eba-4778-beda-8e312ef3a6d9/volumes" Oct 01 15:31:13 crc kubenswrapper[4869]: I1001 15:31:13.597324 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod 
volumes dir" podUID="f88907af-39e7-4499-9745-1fe0a8c42774" path="/var/lib/kubelet/pods/f88907af-39e7-4499-9745-1fe0a8c42774/volumes" Oct 01 15:31:21 crc kubenswrapper[4869]: I1001 15:31:21.588710 4869 scope.go:117] "RemoveContainer" containerID="1d80d75dc72da971c79fa8b67243b56fe5690fce0a3f0d0147f32ac22b29db29" Oct 01 15:31:21 crc kubenswrapper[4869]: E1001 15:31:21.589752 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 15:31:32 crc kubenswrapper[4869]: I1001 15:31:32.581635 4869 scope.go:117] "RemoveContainer" containerID="1d80d75dc72da971c79fa8b67243b56fe5690fce0a3f0d0147f32ac22b29db29" Oct 01 15:31:32 crc kubenswrapper[4869]: E1001 15:31:32.582792 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 15:31:35 crc kubenswrapper[4869]: I1001 15:31:35.029672 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-vs5p9"] Oct 01 15:31:35 crc kubenswrapper[4869]: I1001 15:31:35.040716 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-vs5p9"] Oct 01 15:31:35 crc kubenswrapper[4869]: I1001 15:31:35.596849 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5d10a15f-ad57-459e-90e7-9dbe3d0033a4" path="/var/lib/kubelet/pods/5d10a15f-ad57-459e-90e7-9dbe3d0033a4/volumes" Oct 01 15:31:45 crc kubenswrapper[4869]: I1001 15:31:45.581506 4869 scope.go:117] "RemoveContainer" containerID="1d80d75dc72da971c79fa8b67243b56fe5690fce0a3f0d0147f32ac22b29db29" Oct 01 15:31:45 crc kubenswrapper[4869]: E1001 15:31:45.582730 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 15:31:49 crc kubenswrapper[4869]: I1001 15:31:49.933191 4869 generic.go:334] "Generic (PLEG): container finished" podID="e9c07398-7b35-43e1-a67a-1678b23ee63d" containerID="cb1689f9cd035cfe3c04150cfdc86bfa5eae7952cfd102d67886467a32dcd6ae" exitCode=0 Oct 01 15:31:49 crc kubenswrapper[4869]: I1001 15:31:49.933292 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xjgrr" event={"ID":"e9c07398-7b35-43e1-a67a-1678b23ee63d","Type":"ContainerDied","Data":"cb1689f9cd035cfe3c04150cfdc86bfa5eae7952cfd102d67886467a32dcd6ae"} Oct 01 15:31:51 crc kubenswrapper[4869]: I1001 15:31:51.372799 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xjgrr" Oct 01 15:31:51 crc kubenswrapper[4869]: I1001 15:31:51.477535 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f689b\" (UniqueName: \"kubernetes.io/projected/e9c07398-7b35-43e1-a67a-1678b23ee63d-kube-api-access-f689b\") pod \"e9c07398-7b35-43e1-a67a-1678b23ee63d\" (UID: \"e9c07398-7b35-43e1-a67a-1678b23ee63d\") " Oct 01 15:31:51 crc kubenswrapper[4869]: I1001 15:31:51.477626 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e9c07398-7b35-43e1-a67a-1678b23ee63d-inventory\") pod \"e9c07398-7b35-43e1-a67a-1678b23ee63d\" (UID: \"e9c07398-7b35-43e1-a67a-1678b23ee63d\") " Oct 01 15:31:51 crc kubenswrapper[4869]: I1001 15:31:51.477711 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e9c07398-7b35-43e1-a67a-1678b23ee63d-ssh-key\") pod \"e9c07398-7b35-43e1-a67a-1678b23ee63d\" (UID: \"e9c07398-7b35-43e1-a67a-1678b23ee63d\") " Oct 01 15:31:51 crc kubenswrapper[4869]: I1001 15:31:51.486539 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e9c07398-7b35-43e1-a67a-1678b23ee63d-kube-api-access-f689b" (OuterVolumeSpecName: "kube-api-access-f689b") pod "e9c07398-7b35-43e1-a67a-1678b23ee63d" (UID: "e9c07398-7b35-43e1-a67a-1678b23ee63d"). InnerVolumeSpecName "kube-api-access-f689b". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:31:51 crc kubenswrapper[4869]: I1001 15:31:51.511568 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e9c07398-7b35-43e1-a67a-1678b23ee63d-inventory" (OuterVolumeSpecName: "inventory") pod "e9c07398-7b35-43e1-a67a-1678b23ee63d" (UID: "e9c07398-7b35-43e1-a67a-1678b23ee63d"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:31:51 crc kubenswrapper[4869]: I1001 15:31:51.528237 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e9c07398-7b35-43e1-a67a-1678b23ee63d-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "e9c07398-7b35-43e1-a67a-1678b23ee63d" (UID: "e9c07398-7b35-43e1-a67a-1678b23ee63d"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:31:51 crc kubenswrapper[4869]: I1001 15:31:51.580626 4869 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e9c07398-7b35-43e1-a67a-1678b23ee63d-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 01 15:31:51 crc kubenswrapper[4869]: I1001 15:31:51.580680 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f689b\" (UniqueName: \"kubernetes.io/projected/e9c07398-7b35-43e1-a67a-1678b23ee63d-kube-api-access-f689b\") on node \"crc\" DevicePath \"\"" Oct 01 15:31:51 crc kubenswrapper[4869]: I1001 15:31:51.580702 4869 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e9c07398-7b35-43e1-a67a-1678b23ee63d-inventory\") on node \"crc\" DevicePath \"\"" Oct 01 15:31:51 crc kubenswrapper[4869]: I1001 15:31:51.954982 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xjgrr" event={"ID":"e9c07398-7b35-43e1-a67a-1678b23ee63d","Type":"ContainerDied","Data":"3dd5d7cb31997519505c04cf16a936234875c8457afb25ab8da3af083dcfd905"} Oct 01 15:31:51 crc kubenswrapper[4869]: I1001 15:31:51.955530 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3dd5d7cb31997519505c04cf16a936234875c8457afb25ab8da3af083dcfd905" Oct 01 15:31:51 crc kubenswrapper[4869]: I1001 15:31:51.955086 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xjgrr" Oct 01 15:31:52 crc kubenswrapper[4869]: I1001 15:31:52.055410 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-qc7bt"] Oct 01 15:31:52 crc kubenswrapper[4869]: E1001 15:31:52.056039 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e9c07398-7b35-43e1-a67a-1678b23ee63d" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Oct 01 15:31:52 crc kubenswrapper[4869]: I1001 15:31:52.056061 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="e9c07398-7b35-43e1-a67a-1678b23ee63d" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Oct 01 15:31:52 crc kubenswrapper[4869]: I1001 15:31:52.056293 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="e9c07398-7b35-43e1-a67a-1678b23ee63d" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Oct 01 15:31:52 crc kubenswrapper[4869]: I1001 15:31:52.057168 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-qc7bt" Oct 01 15:31:52 crc kubenswrapper[4869]: I1001 15:31:52.060780 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 01 15:31:52 crc kubenswrapper[4869]: I1001 15:31:52.062123 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 01 15:31:52 crc kubenswrapper[4869]: I1001 15:31:52.062315 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-cjg8g" Oct 01 15:31:52 crc kubenswrapper[4869]: I1001 15:31:52.070158 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 01 15:31:52 crc kubenswrapper[4869]: I1001 15:31:52.079791 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-qc7bt"] Oct 01 15:31:52 crc kubenswrapper[4869]: I1001 15:31:52.092406 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/448c8396-8f33-435c-811e-ec4a295c9759-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-qc7bt\" (UID: \"448c8396-8f33-435c-811e-ec4a295c9759\") " pod="openstack/ssh-known-hosts-edpm-deployment-qc7bt" Oct 01 15:31:52 crc kubenswrapper[4869]: I1001 15:31:52.092452 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xcn4b\" (UniqueName: \"kubernetes.io/projected/448c8396-8f33-435c-811e-ec4a295c9759-kube-api-access-xcn4b\") pod \"ssh-known-hosts-edpm-deployment-qc7bt\" (UID: \"448c8396-8f33-435c-811e-ec4a295c9759\") " pod="openstack/ssh-known-hosts-edpm-deployment-qc7bt" Oct 01 15:31:52 crc kubenswrapper[4869]: I1001 15:31:52.092475 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/448c8396-8f33-435c-811e-ec4a295c9759-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-qc7bt\" (UID: \"448c8396-8f33-435c-811e-ec4a295c9759\") " pod="openstack/ssh-known-hosts-edpm-deployment-qc7bt" Oct 01 15:31:52 crc kubenswrapper[4869]: I1001 15:31:52.193980 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/448c8396-8f33-435c-811e-ec4a295c9759-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-qc7bt\" (UID: \"448c8396-8f33-435c-811e-ec4a295c9759\") " pod="openstack/ssh-known-hosts-edpm-deployment-qc7bt" Oct 01 15:31:52 crc kubenswrapper[4869]: I1001 15:31:52.194037 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xcn4b\" (UniqueName: \"kubernetes.io/projected/448c8396-8f33-435c-811e-ec4a295c9759-kube-api-access-xcn4b\") pod \"ssh-known-hosts-edpm-deployment-qc7bt\" (UID: \"448c8396-8f33-435c-811e-ec4a295c9759\") " pod="openstack/ssh-known-hosts-edpm-deployment-qc7bt" Oct 01 15:31:52 crc kubenswrapper[4869]: I1001 15:31:52.194058 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/448c8396-8f33-435c-811e-ec4a295c9759-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-qc7bt\" (UID: \"448c8396-8f33-435c-811e-ec4a295c9759\") " pod="openstack/ssh-known-hosts-edpm-deployment-qc7bt" Oct 01 15:31:52 crc 
kubenswrapper[4869]: I1001 15:31:52.200972 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/448c8396-8f33-435c-811e-ec4a295c9759-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-qc7bt\" (UID: \"448c8396-8f33-435c-811e-ec4a295c9759\") " pod="openstack/ssh-known-hosts-edpm-deployment-qc7bt" Oct 01 15:31:52 crc kubenswrapper[4869]: I1001 15:31:52.210056 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/448c8396-8f33-435c-811e-ec4a295c9759-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-qc7bt\" (UID: \"448c8396-8f33-435c-811e-ec4a295c9759\") " pod="openstack/ssh-known-hosts-edpm-deployment-qc7bt" Oct 01 15:31:52 crc kubenswrapper[4869]: I1001 15:31:52.214348 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xcn4b\" (UniqueName: \"kubernetes.io/projected/448c8396-8f33-435c-811e-ec4a295c9759-kube-api-access-xcn4b\") pod \"ssh-known-hosts-edpm-deployment-qc7bt\" (UID: \"448c8396-8f33-435c-811e-ec4a295c9759\") " pod="openstack/ssh-known-hosts-edpm-deployment-qc7bt" Oct 01 15:31:52 crc kubenswrapper[4869]: I1001 15:31:52.383340 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-qc7bt" Oct 01 15:31:52 crc kubenswrapper[4869]: I1001 15:31:52.941586 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-qc7bt"] Oct 01 15:31:52 crc kubenswrapper[4869]: W1001 15:31:52.950030 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod448c8396_8f33_435c_811e_ec4a295c9759.slice/crio-0411472a879931dbb932c8b51964c43194bc7578b537bbcf8c79269b38b43e72 WatchSource:0}: Error finding container 0411472a879931dbb932c8b51964c43194bc7578b537bbcf8c79269b38b43e72: Status 404 returned error can't find the container with id 0411472a879931dbb932c8b51964c43194bc7578b537bbcf8c79269b38b43e72 Oct 01 15:31:52 crc kubenswrapper[4869]: I1001 15:31:52.967809 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-qc7bt" event={"ID":"448c8396-8f33-435c-811e-ec4a295c9759","Type":"ContainerStarted","Data":"0411472a879931dbb932c8b51964c43194bc7578b537bbcf8c79269b38b43e72"} Oct 01 15:31:53 crc kubenswrapper[4869]: E1001 15:31:53.933615 4869 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode9c07398_7b35_43e1_a67a_1678b23ee63d.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode9c07398_7b35_43e1_a67a_1678b23ee63d.slice/crio-3dd5d7cb31997519505c04cf16a936234875c8457afb25ab8da3af083dcfd905\": RecentStats: unable to find data in memory cache]" Oct 01 15:31:53 crc kubenswrapper[4869]: I1001 15:31:53.977420 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-qc7bt" event={"ID":"448c8396-8f33-435c-811e-ec4a295c9759","Type":"ContainerStarted","Data":"d76594c69089b736be60e31e1f3b79b8a758908c6a92b664ee6ebe187d739276"} Oct 01 15:31:53 crc kubenswrapper[4869]: I1001 15:31:53.995604 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ssh-known-hosts-edpm-deployment-qc7bt" 
podStartSLOduration=1.370328638 podStartE2EDuration="1.995584155s" podCreationTimestamp="2025-10-01 15:31:52 +0000 UTC" firstStartedPulling="2025-10-01 15:31:52.959983432 +0000 UTC m=+1622.106826538" lastFinishedPulling="2025-10-01 15:31:53.585238939 +0000 UTC m=+1622.732082055" observedRunningTime="2025-10-01 15:31:53.995533444 +0000 UTC m=+1623.142376560" watchObservedRunningTime="2025-10-01 15:31:53.995584155 +0000 UTC m=+1623.142427271" Oct 01 15:31:57 crc kubenswrapper[4869]: I1001 15:31:57.051092 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-g8tmw"] Oct 01 15:31:57 crc kubenswrapper[4869]: I1001 15:31:57.068035 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-fmgks"] Oct 01 15:31:57 crc kubenswrapper[4869]: I1001 15:31:57.079230 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-g8tmw"] Oct 01 15:31:57 crc kubenswrapper[4869]: I1001 15:31:57.090565 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-fmgks"] Oct 01 15:31:57 crc kubenswrapper[4869]: I1001 15:31:57.591130 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5d724a70-eb91-4679-9f86-c3d1e874bdc6" path="/var/lib/kubelet/pods/5d724a70-eb91-4679-9f86-c3d1e874bdc6/volumes" Oct 01 15:31:57 crc kubenswrapper[4869]: I1001 15:31:57.591888 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d7bd9313-7a67-41cd-9ab7-efd58f5ab44f" path="/var/lib/kubelet/pods/d7bd9313-7a67-41cd-9ab7-efd58f5ab44f/volumes" Oct 01 15:32:00 crc kubenswrapper[4869]: I1001 15:32:00.581558 4869 scope.go:117] "RemoveContainer" containerID="1d80d75dc72da971c79fa8b67243b56fe5690fce0a3f0d0147f32ac22b29db29" Oct 01 15:32:00 crc kubenswrapper[4869]: E1001 15:32:00.582132 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 15:32:01 crc kubenswrapper[4869]: I1001 15:32:01.077198 4869 generic.go:334] "Generic (PLEG): container finished" podID="448c8396-8f33-435c-811e-ec4a295c9759" containerID="d76594c69089b736be60e31e1f3b79b8a758908c6a92b664ee6ebe187d739276" exitCode=0 Oct 01 15:32:01 crc kubenswrapper[4869]: I1001 15:32:01.077249 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-qc7bt" event={"ID":"448c8396-8f33-435c-811e-ec4a295c9759","Type":"ContainerDied","Data":"d76594c69089b736be60e31e1f3b79b8a758908c6a92b664ee6ebe187d739276"} Oct 01 15:32:02 crc kubenswrapper[4869]: I1001 15:32:02.517107 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-qc7bt" Oct 01 15:32:02 crc kubenswrapper[4869]: I1001 15:32:02.712945 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcn4b\" (UniqueName: \"kubernetes.io/projected/448c8396-8f33-435c-811e-ec4a295c9759-kube-api-access-xcn4b\") pod \"448c8396-8f33-435c-811e-ec4a295c9759\" (UID: \"448c8396-8f33-435c-811e-ec4a295c9759\") " Oct 01 15:32:02 crc kubenswrapper[4869]: I1001 15:32:02.713860 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/448c8396-8f33-435c-811e-ec4a295c9759-inventory-0\") pod \"448c8396-8f33-435c-811e-ec4a295c9759\" (UID: \"448c8396-8f33-435c-811e-ec4a295c9759\") " Oct 01 15:32:02 crc kubenswrapper[4869]: I1001 15:32:02.714060 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/448c8396-8f33-435c-811e-ec4a295c9759-ssh-key-openstack-edpm-ipam\") pod \"448c8396-8f33-435c-811e-ec4a295c9759\" (UID: \"448c8396-8f33-435c-811e-ec4a295c9759\") " Oct 01 15:32:02 crc kubenswrapper[4869]: I1001 15:32:02.724529 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/448c8396-8f33-435c-811e-ec4a295c9759-kube-api-access-xcn4b" (OuterVolumeSpecName: "kube-api-access-xcn4b") pod "448c8396-8f33-435c-811e-ec4a295c9759" (UID: "448c8396-8f33-435c-811e-ec4a295c9759"). InnerVolumeSpecName "kube-api-access-xcn4b". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:32:02 crc kubenswrapper[4869]: I1001 15:32:02.740812 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/448c8396-8f33-435c-811e-ec4a295c9759-inventory-0" (OuterVolumeSpecName: "inventory-0") pod "448c8396-8f33-435c-811e-ec4a295c9759" (UID: "448c8396-8f33-435c-811e-ec4a295c9759"). InnerVolumeSpecName "inventory-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:32:02 crc kubenswrapper[4869]: I1001 15:32:02.758497 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/448c8396-8f33-435c-811e-ec4a295c9759-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "448c8396-8f33-435c-811e-ec4a295c9759" (UID: "448c8396-8f33-435c-811e-ec4a295c9759"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:32:02 crc kubenswrapper[4869]: I1001 15:32:02.815976 4869 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/448c8396-8f33-435c-811e-ec4a295c9759-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Oct 01 15:32:02 crc kubenswrapper[4869]: I1001 15:32:02.816011 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcn4b\" (UniqueName: \"kubernetes.io/projected/448c8396-8f33-435c-811e-ec4a295c9759-kube-api-access-xcn4b\") on node \"crc\" DevicePath \"\"" Oct 01 15:32:02 crc kubenswrapper[4869]: I1001 15:32:02.816024 4869 reconciler_common.go:293] "Volume detached for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/448c8396-8f33-435c-811e-ec4a295c9759-inventory-0\") on node \"crc\" DevicePath \"\"" Oct 01 15:32:03 crc kubenswrapper[4869]: I1001 15:32:03.107123 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-qc7bt" event={"ID":"448c8396-8f33-435c-811e-ec4a295c9759","Type":"ContainerDied","Data":"0411472a879931dbb932c8b51964c43194bc7578b537bbcf8c79269b38b43e72"} Oct 01 15:32:03 crc kubenswrapper[4869]: I1001 15:32:03.107178 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0411472a879931dbb932c8b51964c43194bc7578b537bbcf8c79269b38b43e72" Oct 01 15:32:03 crc kubenswrapper[4869]: I1001 15:32:03.107249 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-qc7bt" Oct 01 15:32:03 crc kubenswrapper[4869]: I1001 15:32:03.168024 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-cdpmf"] Oct 01 15:32:03 crc kubenswrapper[4869]: E1001 15:32:03.168484 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="448c8396-8f33-435c-811e-ec4a295c9759" containerName="ssh-known-hosts-edpm-deployment" Oct 01 15:32:03 crc kubenswrapper[4869]: I1001 15:32:03.168499 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="448c8396-8f33-435c-811e-ec4a295c9759" containerName="ssh-known-hosts-edpm-deployment" Oct 01 15:32:03 crc kubenswrapper[4869]: I1001 15:32:03.168704 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="448c8396-8f33-435c-811e-ec4a295c9759" containerName="ssh-known-hosts-edpm-deployment" Oct 01 15:32:03 crc kubenswrapper[4869]: I1001 15:32:03.169445 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-cdpmf" Oct 01 15:32:03 crc kubenswrapper[4869]: I1001 15:32:03.171775 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 01 15:32:03 crc kubenswrapper[4869]: I1001 15:32:03.171787 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 01 15:32:03 crc kubenswrapper[4869]: I1001 15:32:03.171792 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-cjg8g" Oct 01 15:32:03 crc kubenswrapper[4869]: I1001 15:32:03.172033 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 01 15:32:03 crc kubenswrapper[4869]: I1001 15:32:03.192402 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-cdpmf"] Oct 01 15:32:03 crc kubenswrapper[4869]: I1001 15:32:03.323588 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/641c2646-4ed1-4e58-ad8f-4fd0a9ae3d84-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-cdpmf\" (UID: \"641c2646-4ed1-4e58-ad8f-4fd0a9ae3d84\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-cdpmf" Oct 01 15:32:03 crc kubenswrapper[4869]: I1001 15:32:03.324015 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/641c2646-4ed1-4e58-ad8f-4fd0a9ae3d84-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-cdpmf\" (UID: \"641c2646-4ed1-4e58-ad8f-4fd0a9ae3d84\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-cdpmf" Oct 01 15:32:03 crc kubenswrapper[4869]: I1001 15:32:03.324088 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gtkpr\" (UniqueName: \"kubernetes.io/projected/641c2646-4ed1-4e58-ad8f-4fd0a9ae3d84-kube-api-access-gtkpr\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-cdpmf\" (UID: \"641c2646-4ed1-4e58-ad8f-4fd0a9ae3d84\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-cdpmf" Oct 01 15:32:03 crc kubenswrapper[4869]: I1001 15:32:03.425478 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gtkpr\" (UniqueName: \"kubernetes.io/projected/641c2646-4ed1-4e58-ad8f-4fd0a9ae3d84-kube-api-access-gtkpr\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-cdpmf\" (UID: \"641c2646-4ed1-4e58-ad8f-4fd0a9ae3d84\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-cdpmf" Oct 01 15:32:03 crc kubenswrapper[4869]: I1001 15:32:03.425635 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/641c2646-4ed1-4e58-ad8f-4fd0a9ae3d84-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-cdpmf\" (UID: \"641c2646-4ed1-4e58-ad8f-4fd0a9ae3d84\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-cdpmf" Oct 01 15:32:03 crc kubenswrapper[4869]: I1001 15:32:03.425807 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/641c2646-4ed1-4e58-ad8f-4fd0a9ae3d84-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-cdpmf\" (UID: \"641c2646-4ed1-4e58-ad8f-4fd0a9ae3d84\") " 
pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-cdpmf" Oct 01 15:32:03 crc kubenswrapper[4869]: I1001 15:32:03.433212 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/641c2646-4ed1-4e58-ad8f-4fd0a9ae3d84-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-cdpmf\" (UID: \"641c2646-4ed1-4e58-ad8f-4fd0a9ae3d84\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-cdpmf" Oct 01 15:32:03 crc kubenswrapper[4869]: I1001 15:32:03.436592 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/641c2646-4ed1-4e58-ad8f-4fd0a9ae3d84-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-cdpmf\" (UID: \"641c2646-4ed1-4e58-ad8f-4fd0a9ae3d84\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-cdpmf" Oct 01 15:32:03 crc kubenswrapper[4869]: I1001 15:32:03.442082 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gtkpr\" (UniqueName: \"kubernetes.io/projected/641c2646-4ed1-4e58-ad8f-4fd0a9ae3d84-kube-api-access-gtkpr\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-cdpmf\" (UID: \"641c2646-4ed1-4e58-ad8f-4fd0a9ae3d84\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-cdpmf" Oct 01 15:32:03 crc kubenswrapper[4869]: I1001 15:32:03.490181 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-cdpmf" Oct 01 15:32:04 crc kubenswrapper[4869]: I1001 15:32:04.067022 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-cdpmf"] Oct 01 15:32:04 crc kubenswrapper[4869]: W1001 15:32:04.070292 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod641c2646_4ed1_4e58_ad8f_4fd0a9ae3d84.slice/crio-eca1bbe8e56cbbef1ca18fc059aa153d8b10b7ee5c09a48fb968b4f99b4ed3ac WatchSource:0}: Error finding container eca1bbe8e56cbbef1ca18fc059aa153d8b10b7ee5c09a48fb968b4f99b4ed3ac: Status 404 returned error can't find the container with id eca1bbe8e56cbbef1ca18fc059aa153d8b10b7ee5c09a48fb968b4f99b4ed3ac Oct 01 15:32:04 crc kubenswrapper[4869]: I1001 15:32:04.115605 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-cdpmf" event={"ID":"641c2646-4ed1-4e58-ad8f-4fd0a9ae3d84","Type":"ContainerStarted","Data":"eca1bbe8e56cbbef1ca18fc059aa153d8b10b7ee5c09a48fb968b4f99b4ed3ac"} Oct 01 15:32:04 crc kubenswrapper[4869]: E1001 15:32:04.245372 4869 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode9c07398_7b35_43e1_a67a_1678b23ee63d.slice/crio-3dd5d7cb31997519505c04cf16a936234875c8457afb25ab8da3af083dcfd905\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode9c07398_7b35_43e1_a67a_1678b23ee63d.slice\": RecentStats: unable to find data in memory cache]" Oct 01 15:32:05 crc kubenswrapper[4869]: I1001 15:32:05.127870 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-cdpmf" event={"ID":"641c2646-4ed1-4e58-ad8f-4fd0a9ae3d84","Type":"ContainerStarted","Data":"8561e99de471e1faf68cc4aed08fee80f54220362b813c439a91c5b7ae5e8348"} Oct 01 15:32:05 crc kubenswrapper[4869]: I1001 15:32:05.149734 4869 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-cdpmf" podStartSLOduration=1.7082478380000001 podStartE2EDuration="2.14971769s" podCreationTimestamp="2025-10-01 15:32:03 +0000 UTC" firstStartedPulling="2025-10-01 15:32:04.073243499 +0000 UTC m=+1633.220086635" lastFinishedPulling="2025-10-01 15:32:04.514713371 +0000 UTC m=+1633.661556487" observedRunningTime="2025-10-01 15:32:05.146577492 +0000 UTC m=+1634.293420608" watchObservedRunningTime="2025-10-01 15:32:05.14971769 +0000 UTC m=+1634.296560806" Oct 01 15:32:12 crc kubenswrapper[4869]: I1001 15:32:12.581189 4869 scope.go:117] "RemoveContainer" containerID="1d80d75dc72da971c79fa8b67243b56fe5690fce0a3f0d0147f32ac22b29db29" Oct 01 15:32:12 crc kubenswrapper[4869]: E1001 15:32:12.581966 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 15:32:13 crc kubenswrapper[4869]: I1001 15:32:13.215043 4869 generic.go:334] "Generic (PLEG): container finished" podID="641c2646-4ed1-4e58-ad8f-4fd0a9ae3d84" containerID="8561e99de471e1faf68cc4aed08fee80f54220362b813c439a91c5b7ae5e8348" exitCode=0 Oct 01 15:32:13 crc kubenswrapper[4869]: I1001 15:32:13.215224 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-cdpmf" event={"ID":"641c2646-4ed1-4e58-ad8f-4fd0a9ae3d84","Type":"ContainerDied","Data":"8561e99de471e1faf68cc4aed08fee80f54220362b813c439a91c5b7ae5e8348"} Oct 01 15:32:13 crc kubenswrapper[4869]: I1001 15:32:13.341301 4869 scope.go:117] "RemoveContainer" containerID="cdee20a8079220f9ce6fb5e85101ee74f6da54f93cbcf0a2371e0ece2638efb8" Oct 01 15:32:13 crc kubenswrapper[4869]: I1001 15:32:13.398158 4869 scope.go:117] "RemoveContainer" containerID="77089dd930d91b9a4efd8ec345e1ee177333454e634521e014a73eb7358a3614" Oct 01 15:32:13 crc kubenswrapper[4869]: I1001 15:32:13.439710 4869 scope.go:117] "RemoveContainer" containerID="7716692b47a29a7f4208d74481f25906b16eabd51acdb9c242e3824d8349e096" Oct 01 15:32:13 crc kubenswrapper[4869]: I1001 15:32:13.466765 4869 scope.go:117] "RemoveContainer" containerID="8d120ffac4c62007201ef1382a40498f8c7ad1994d4c95c61f10b5565efd2225" Oct 01 15:32:13 crc kubenswrapper[4869]: I1001 15:32:13.530965 4869 scope.go:117] "RemoveContainer" containerID="e033c41ddad75eec30a51c662b6b6647edceda69c44ec10eac99c04612c42039" Oct 01 15:32:13 crc kubenswrapper[4869]: I1001 15:32:13.603644 4869 scope.go:117] "RemoveContainer" containerID="362b0cf8b20f92cb86aa76db20dd0cdb552a66f501bff1efb744f875b3060801" Oct 01 15:32:14 crc kubenswrapper[4869]: E1001 15:32:14.492011 4869 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode9c07398_7b35_43e1_a67a_1678b23ee63d.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode9c07398_7b35_43e1_a67a_1678b23ee63d.slice/crio-3dd5d7cb31997519505c04cf16a936234875c8457afb25ab8da3af083dcfd905\": RecentStats: unable to find data in memory cache]" Oct 01 15:32:14 crc kubenswrapper[4869]: I1001 15:32:14.676198 
4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-cdpmf" Oct 01 15:32:14 crc kubenswrapper[4869]: I1001 15:32:14.769019 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/641c2646-4ed1-4e58-ad8f-4fd0a9ae3d84-ssh-key\") pod \"641c2646-4ed1-4e58-ad8f-4fd0a9ae3d84\" (UID: \"641c2646-4ed1-4e58-ad8f-4fd0a9ae3d84\") " Oct 01 15:32:14 crc kubenswrapper[4869]: I1001 15:32:14.769102 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/641c2646-4ed1-4e58-ad8f-4fd0a9ae3d84-inventory\") pod \"641c2646-4ed1-4e58-ad8f-4fd0a9ae3d84\" (UID: \"641c2646-4ed1-4e58-ad8f-4fd0a9ae3d84\") " Oct 01 15:32:14 crc kubenswrapper[4869]: I1001 15:32:14.769282 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gtkpr\" (UniqueName: \"kubernetes.io/projected/641c2646-4ed1-4e58-ad8f-4fd0a9ae3d84-kube-api-access-gtkpr\") pod \"641c2646-4ed1-4e58-ad8f-4fd0a9ae3d84\" (UID: \"641c2646-4ed1-4e58-ad8f-4fd0a9ae3d84\") " Oct 01 15:32:14 crc kubenswrapper[4869]: I1001 15:32:14.774661 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/641c2646-4ed1-4e58-ad8f-4fd0a9ae3d84-kube-api-access-gtkpr" (OuterVolumeSpecName: "kube-api-access-gtkpr") pod "641c2646-4ed1-4e58-ad8f-4fd0a9ae3d84" (UID: "641c2646-4ed1-4e58-ad8f-4fd0a9ae3d84"). InnerVolumeSpecName "kube-api-access-gtkpr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:32:14 crc kubenswrapper[4869]: I1001 15:32:14.792624 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/641c2646-4ed1-4e58-ad8f-4fd0a9ae3d84-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "641c2646-4ed1-4e58-ad8f-4fd0a9ae3d84" (UID: "641c2646-4ed1-4e58-ad8f-4fd0a9ae3d84"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:32:14 crc kubenswrapper[4869]: I1001 15:32:14.810331 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/641c2646-4ed1-4e58-ad8f-4fd0a9ae3d84-inventory" (OuterVolumeSpecName: "inventory") pod "641c2646-4ed1-4e58-ad8f-4fd0a9ae3d84" (UID: "641c2646-4ed1-4e58-ad8f-4fd0a9ae3d84"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:32:14 crc kubenswrapper[4869]: I1001 15:32:14.872012 4869 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/641c2646-4ed1-4e58-ad8f-4fd0a9ae3d84-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 01 15:32:14 crc kubenswrapper[4869]: I1001 15:32:14.872053 4869 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/641c2646-4ed1-4e58-ad8f-4fd0a9ae3d84-inventory\") on node \"crc\" DevicePath \"\"" Oct 01 15:32:14 crc kubenswrapper[4869]: I1001 15:32:14.872069 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gtkpr\" (UniqueName: \"kubernetes.io/projected/641c2646-4ed1-4e58-ad8f-4fd0a9ae3d84-kube-api-access-gtkpr\") on node \"crc\" DevicePath \"\"" Oct 01 15:32:15 crc kubenswrapper[4869]: I1001 15:32:15.242162 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-cdpmf" event={"ID":"641c2646-4ed1-4e58-ad8f-4fd0a9ae3d84","Type":"ContainerDied","Data":"eca1bbe8e56cbbef1ca18fc059aa153d8b10b7ee5c09a48fb968b4f99b4ed3ac"} Oct 01 15:32:15 crc kubenswrapper[4869]: I1001 15:32:15.242247 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="eca1bbe8e56cbbef1ca18fc059aa153d8b10b7ee5c09a48fb968b4f99b4ed3ac" Oct 01 15:32:15 crc kubenswrapper[4869]: I1001 15:32:15.242757 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-cdpmf" Oct 01 15:32:15 crc kubenswrapper[4869]: I1001 15:32:15.352638 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-tlpkx"] Oct 01 15:32:15 crc kubenswrapper[4869]: E1001 15:32:15.353239 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="641c2646-4ed1-4e58-ad8f-4fd0a9ae3d84" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Oct 01 15:32:15 crc kubenswrapper[4869]: I1001 15:32:15.353287 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="641c2646-4ed1-4e58-ad8f-4fd0a9ae3d84" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Oct 01 15:32:15 crc kubenswrapper[4869]: I1001 15:32:15.353599 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="641c2646-4ed1-4e58-ad8f-4fd0a9ae3d84" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Oct 01 15:32:15 crc kubenswrapper[4869]: I1001 15:32:15.354596 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-tlpkx" Oct 01 15:32:15 crc kubenswrapper[4869]: I1001 15:32:15.359216 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 01 15:32:15 crc kubenswrapper[4869]: I1001 15:32:15.359759 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-cjg8g" Oct 01 15:32:15 crc kubenswrapper[4869]: I1001 15:32:15.360102 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 01 15:32:15 crc kubenswrapper[4869]: I1001 15:32:15.363875 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 01 15:32:15 crc kubenswrapper[4869]: I1001 15:32:15.367851 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-tlpkx"] Oct 01 15:32:15 crc kubenswrapper[4869]: I1001 15:32:15.485470 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7c9zb\" (UniqueName: \"kubernetes.io/projected/e93f273e-6567-45a8-b40d-e9148bceb7b5-kube-api-access-7c9zb\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-tlpkx\" (UID: \"e93f273e-6567-45a8-b40d-e9148bceb7b5\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-tlpkx" Oct 01 15:32:15 crc kubenswrapper[4869]: I1001 15:32:15.485572 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e93f273e-6567-45a8-b40d-e9148bceb7b5-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-tlpkx\" (UID: \"e93f273e-6567-45a8-b40d-e9148bceb7b5\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-tlpkx" Oct 01 15:32:15 crc kubenswrapper[4869]: I1001 15:32:15.485633 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e93f273e-6567-45a8-b40d-e9148bceb7b5-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-tlpkx\" (UID: \"e93f273e-6567-45a8-b40d-e9148bceb7b5\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-tlpkx" Oct 01 15:32:15 crc kubenswrapper[4869]: I1001 15:32:15.586815 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7c9zb\" (UniqueName: \"kubernetes.io/projected/e93f273e-6567-45a8-b40d-e9148bceb7b5-kube-api-access-7c9zb\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-tlpkx\" (UID: \"e93f273e-6567-45a8-b40d-e9148bceb7b5\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-tlpkx" Oct 01 15:32:15 crc kubenswrapper[4869]: I1001 15:32:15.586915 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e93f273e-6567-45a8-b40d-e9148bceb7b5-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-tlpkx\" (UID: \"e93f273e-6567-45a8-b40d-e9148bceb7b5\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-tlpkx" Oct 01 15:32:15 crc kubenswrapper[4869]: I1001 15:32:15.586992 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e93f273e-6567-45a8-b40d-e9148bceb7b5-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-tlpkx\" (UID: 
\"e93f273e-6567-45a8-b40d-e9148bceb7b5\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-tlpkx" Oct 01 15:32:15 crc kubenswrapper[4869]: I1001 15:32:15.592528 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e93f273e-6567-45a8-b40d-e9148bceb7b5-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-tlpkx\" (UID: \"e93f273e-6567-45a8-b40d-e9148bceb7b5\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-tlpkx" Oct 01 15:32:15 crc kubenswrapper[4869]: I1001 15:32:15.593758 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e93f273e-6567-45a8-b40d-e9148bceb7b5-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-tlpkx\" (UID: \"e93f273e-6567-45a8-b40d-e9148bceb7b5\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-tlpkx" Oct 01 15:32:15 crc kubenswrapper[4869]: I1001 15:32:15.620175 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7c9zb\" (UniqueName: \"kubernetes.io/projected/e93f273e-6567-45a8-b40d-e9148bceb7b5-kube-api-access-7c9zb\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-tlpkx\" (UID: \"e93f273e-6567-45a8-b40d-e9148bceb7b5\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-tlpkx" Oct 01 15:32:15 crc kubenswrapper[4869]: I1001 15:32:15.720753 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-tlpkx" Oct 01 15:32:16 crc kubenswrapper[4869]: I1001 15:32:16.298313 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-tlpkx"] Oct 01 15:32:17 crc kubenswrapper[4869]: I1001 15:32:17.260039 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-tlpkx" event={"ID":"e93f273e-6567-45a8-b40d-e9148bceb7b5","Type":"ContainerStarted","Data":"fba129297637ffe02d64b293921551dad383d78ff9d2bf2f736f00abf748bcf4"} Oct 01 15:32:17 crc kubenswrapper[4869]: I1001 15:32:17.260479 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-tlpkx" event={"ID":"e93f273e-6567-45a8-b40d-e9148bceb7b5","Type":"ContainerStarted","Data":"986e41800643f2f3396fcfb89b5f055b0d61f823ccb0cd4955924a030130530d"} Oct 01 15:32:17 crc kubenswrapper[4869]: I1001 15:32:17.288393 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-tlpkx" podStartSLOduration=1.8413029220000001 podStartE2EDuration="2.288363234s" podCreationTimestamp="2025-10-01 15:32:15 +0000 UTC" firstStartedPulling="2025-10-01 15:32:16.293664721 +0000 UTC m=+1645.440507837" lastFinishedPulling="2025-10-01 15:32:16.740724993 +0000 UTC m=+1645.887568149" observedRunningTime="2025-10-01 15:32:17.277205135 +0000 UTC m=+1646.424048281" watchObservedRunningTime="2025-10-01 15:32:17.288363234 +0000 UTC m=+1646.435206390" Oct 01 15:32:24 crc kubenswrapper[4869]: I1001 15:32:24.580986 4869 scope.go:117] "RemoveContainer" containerID="1d80d75dc72da971c79fa8b67243b56fe5690fce0a3f0d0147f32ac22b29db29" Oct 01 15:32:24 crc kubenswrapper[4869]: E1001 15:32:24.582040 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 15:32:24 crc kubenswrapper[4869]: E1001 15:32:24.760391 4869 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode9c07398_7b35_43e1_a67a_1678b23ee63d.slice/crio-3dd5d7cb31997519505c04cf16a936234875c8457afb25ab8da3af083dcfd905\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode9c07398_7b35_43e1_a67a_1678b23ee63d.slice\": RecentStats: unable to find data in memory cache]" Oct 01 15:32:27 crc kubenswrapper[4869]: I1001 15:32:27.356938 4869 generic.go:334] "Generic (PLEG): container finished" podID="e93f273e-6567-45a8-b40d-e9148bceb7b5" containerID="fba129297637ffe02d64b293921551dad383d78ff9d2bf2f736f00abf748bcf4" exitCode=0 Oct 01 15:32:27 crc kubenswrapper[4869]: I1001 15:32:27.357064 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-tlpkx" event={"ID":"e93f273e-6567-45a8-b40d-e9148bceb7b5","Type":"ContainerDied","Data":"fba129297637ffe02d64b293921551dad383d78ff9d2bf2f736f00abf748bcf4"} Oct 01 15:32:28 crc kubenswrapper[4869]: I1001 15:32:28.856856 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-tlpkx" Oct 01 15:32:28 crc kubenswrapper[4869]: I1001 15:32:28.963079 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e93f273e-6567-45a8-b40d-e9148bceb7b5-inventory\") pod \"e93f273e-6567-45a8-b40d-e9148bceb7b5\" (UID: \"e93f273e-6567-45a8-b40d-e9148bceb7b5\") " Oct 01 15:32:28 crc kubenswrapper[4869]: I1001 15:32:28.963253 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c9zb\" (UniqueName: \"kubernetes.io/projected/e93f273e-6567-45a8-b40d-e9148bceb7b5-kube-api-access-7c9zb\") pod \"e93f273e-6567-45a8-b40d-e9148bceb7b5\" (UID: \"e93f273e-6567-45a8-b40d-e9148bceb7b5\") " Oct 01 15:32:28 crc kubenswrapper[4869]: I1001 15:32:28.963317 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e93f273e-6567-45a8-b40d-e9148bceb7b5-ssh-key\") pod \"e93f273e-6567-45a8-b40d-e9148bceb7b5\" (UID: \"e93f273e-6567-45a8-b40d-e9148bceb7b5\") " Oct 01 15:32:28 crc kubenswrapper[4869]: I1001 15:32:28.969353 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e93f273e-6567-45a8-b40d-e9148bceb7b5-kube-api-access-7c9zb" (OuterVolumeSpecName: "kube-api-access-7c9zb") pod "e93f273e-6567-45a8-b40d-e9148bceb7b5" (UID: "e93f273e-6567-45a8-b40d-e9148bceb7b5"). InnerVolumeSpecName "kube-api-access-7c9zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:32:29 crc kubenswrapper[4869]: I1001 15:32:29.011793 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e93f273e-6567-45a8-b40d-e9148bceb7b5-inventory" (OuterVolumeSpecName: "inventory") pod "e93f273e-6567-45a8-b40d-e9148bceb7b5" (UID: "e93f273e-6567-45a8-b40d-e9148bceb7b5"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:32:29 crc kubenswrapper[4869]: I1001 15:32:29.012303 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e93f273e-6567-45a8-b40d-e9148bceb7b5-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "e93f273e-6567-45a8-b40d-e9148bceb7b5" (UID: "e93f273e-6567-45a8-b40d-e9148bceb7b5"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:32:29 crc kubenswrapper[4869]: I1001 15:32:29.066167 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c9zb\" (UniqueName: \"kubernetes.io/projected/e93f273e-6567-45a8-b40d-e9148bceb7b5-kube-api-access-7c9zb\") on node \"crc\" DevicePath \"\"" Oct 01 15:32:29 crc kubenswrapper[4869]: I1001 15:32:29.066372 4869 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e93f273e-6567-45a8-b40d-e9148bceb7b5-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 01 15:32:29 crc kubenswrapper[4869]: I1001 15:32:29.066453 4869 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e93f273e-6567-45a8-b40d-e9148bceb7b5-inventory\") on node \"crc\" DevicePath \"\"" Oct 01 15:32:29 crc kubenswrapper[4869]: I1001 15:32:29.387347 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-tlpkx" event={"ID":"e93f273e-6567-45a8-b40d-e9148bceb7b5","Type":"ContainerDied","Data":"986e41800643f2f3396fcfb89b5f055b0d61f823ccb0cd4955924a030130530d"} Oct 01 15:32:29 crc kubenswrapper[4869]: I1001 15:32:29.387394 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="986e41800643f2f3396fcfb89b5f055b0d61f823ccb0cd4955924a030130530d" Oct 01 15:32:29 crc kubenswrapper[4869]: I1001 15:32:29.387850 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-tlpkx" Oct 01 15:32:34 crc kubenswrapper[4869]: E1001 15:32:34.986616 4869 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode9c07398_7b35_43e1_a67a_1678b23ee63d.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode9c07398_7b35_43e1_a67a_1678b23ee63d.slice/crio-3dd5d7cb31997519505c04cf16a936234875c8457afb25ab8da3af083dcfd905\": RecentStats: unable to find data in memory cache]" Oct 01 15:32:36 crc kubenswrapper[4869]: I1001 15:32:36.581008 4869 scope.go:117] "RemoveContainer" containerID="1d80d75dc72da971c79fa8b67243b56fe5690fce0a3f0d0147f32ac22b29db29" Oct 01 15:32:36 crc kubenswrapper[4869]: E1001 15:32:36.581552 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 15:32:41 crc kubenswrapper[4869]: I1001 15:32:41.060661 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-zhzd5"] Oct 01 15:32:41 crc kubenswrapper[4869]: I1001 15:32:41.069323 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-zhzd5"] Oct 01 15:32:41 crc kubenswrapper[4869]: I1001 15:32:41.597167 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="03242a06-2186-4f5d-9f1b-9b11db33e397" path="/var/lib/kubelet/pods/03242a06-2186-4f5d-9f1b-9b11db33e397/volumes" Oct 01 15:32:45 crc kubenswrapper[4869]: E1001 15:32:45.256094 4869 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode9c07398_7b35_43e1_a67a_1678b23ee63d.slice/crio-3dd5d7cb31997519505c04cf16a936234875c8457afb25ab8da3af083dcfd905\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode9c07398_7b35_43e1_a67a_1678b23ee63d.slice\": RecentStats: unable to find data in memory cache]" Oct 01 15:32:51 crc kubenswrapper[4869]: I1001 15:32:51.593655 4869 scope.go:117] "RemoveContainer" containerID="1d80d75dc72da971c79fa8b67243b56fe5690fce0a3f0d0147f32ac22b29db29" Oct 01 15:32:51 crc kubenswrapper[4869]: E1001 15:32:51.597859 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 15:33:06 crc kubenswrapper[4869]: I1001 15:33:06.581049 4869 scope.go:117] "RemoveContainer" containerID="1d80d75dc72da971c79fa8b67243b56fe5690fce0a3f0d0147f32ac22b29db29" Oct 01 15:33:06 crc kubenswrapper[4869]: E1001 15:33:06.582304 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: 
\"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 15:33:13 crc kubenswrapper[4869]: I1001 15:33:13.744088 4869 scope.go:117] "RemoveContainer" containerID="63bdd2cb5281851b9793c3d9cdf1618ecff3b8f2d442a1b0795322788144dd0a" Oct 01 15:33:21 crc kubenswrapper[4869]: I1001 15:33:21.593421 4869 scope.go:117] "RemoveContainer" containerID="1d80d75dc72da971c79fa8b67243b56fe5690fce0a3f0d0147f32ac22b29db29" Oct 01 15:33:21 crc kubenswrapper[4869]: E1001 15:33:21.594667 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 15:33:35 crc kubenswrapper[4869]: I1001 15:33:35.581541 4869 scope.go:117] "RemoveContainer" containerID="1d80d75dc72da971c79fa8b67243b56fe5690fce0a3f0d0147f32ac22b29db29" Oct 01 15:33:35 crc kubenswrapper[4869]: E1001 15:33:35.582388 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 15:33:49 crc kubenswrapper[4869]: I1001 15:33:49.581077 4869 scope.go:117] "RemoveContainer" containerID="1d80d75dc72da971c79fa8b67243b56fe5690fce0a3f0d0147f32ac22b29db29" Oct 01 15:33:49 crc kubenswrapper[4869]: E1001 15:33:49.582342 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 15:34:01 crc kubenswrapper[4869]: I1001 15:34:01.597627 4869 scope.go:117] "RemoveContainer" containerID="1d80d75dc72da971c79fa8b67243b56fe5690fce0a3f0d0147f32ac22b29db29" Oct 01 15:34:01 crc kubenswrapper[4869]: E1001 15:34:01.600336 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 15:34:13 crc kubenswrapper[4869]: I1001 15:34:13.581467 4869 scope.go:117] "RemoveContainer" containerID="1d80d75dc72da971c79fa8b67243b56fe5690fce0a3f0d0147f32ac22b29db29" Oct 01 15:34:14 crc kubenswrapper[4869]: I1001 15:34:14.405123 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" 
event={"ID":"a4b64f7f-0b03-4f47-965b-9fde048b735c","Type":"ContainerStarted","Data":"9a99d03d53eede2552e3857831adcef848702c91bd2e41e6690813d14315bcda"} Oct 01 15:36:13 crc kubenswrapper[4869]: I1001 15:36:13.354553 4869 patch_prober.go:28] interesting pod/machine-config-daemon-c86m8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 15:36:13 crc kubenswrapper[4869]: I1001 15:36:13.355010 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 15:36:43 crc kubenswrapper[4869]: I1001 15:36:43.353734 4869 patch_prober.go:28] interesting pod/machine-config-daemon-c86m8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 15:36:43 crc kubenswrapper[4869]: I1001 15:36:43.354244 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 15:37:13 crc kubenswrapper[4869]: I1001 15:37:13.354437 4869 patch_prober.go:28] interesting pod/machine-config-daemon-c86m8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 15:37:13 crc kubenswrapper[4869]: I1001 15:37:13.355179 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 15:37:13 crc kubenswrapper[4869]: I1001 15:37:13.355300 4869 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" Oct 01 15:37:13 crc kubenswrapper[4869]: I1001 15:37:13.356290 4869 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"9a99d03d53eede2552e3857831adcef848702c91bd2e41e6690813d14315bcda"} pod="openshift-machine-config-operator/machine-config-daemon-c86m8" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 01 15:37:13 crc kubenswrapper[4869]: I1001 15:37:13.356385 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" containerID="cri-o://9a99d03d53eede2552e3857831adcef848702c91bd2e41e6690813d14315bcda" gracePeriod=600 Oct 01 15:37:13 crc kubenswrapper[4869]: I1001 15:37:13.996726 4869 generic.go:334] "Generic (PLEG): container finished" 
podID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerID="9a99d03d53eede2552e3857831adcef848702c91bd2e41e6690813d14315bcda" exitCode=0 Oct 01 15:37:13 crc kubenswrapper[4869]: I1001 15:37:13.997132 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" event={"ID":"a4b64f7f-0b03-4f47-965b-9fde048b735c","Type":"ContainerDied","Data":"9a99d03d53eede2552e3857831adcef848702c91bd2e41e6690813d14315bcda"} Oct 01 15:37:13 crc kubenswrapper[4869]: I1001 15:37:13.997396 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" event={"ID":"a4b64f7f-0b03-4f47-965b-9fde048b735c","Type":"ContainerStarted","Data":"850fa9657d55ab61ed48a042dba1eb165240f62a294e8f87d32c9a3206247a54"} Oct 01 15:37:13 crc kubenswrapper[4869]: I1001 15:37:13.997502 4869 scope.go:117] "RemoveContainer" containerID="1d80d75dc72da971c79fa8b67243b56fe5690fce0a3f0d0147f32ac22b29db29" Oct 01 15:37:49 crc kubenswrapper[4869]: I1001 15:37:49.613493 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-fdxq4"] Oct 01 15:37:49 crc kubenswrapper[4869]: E1001 15:37:49.614926 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e93f273e-6567-45a8-b40d-e9148bceb7b5" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Oct 01 15:37:49 crc kubenswrapper[4869]: I1001 15:37:49.614956 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="e93f273e-6567-45a8-b40d-e9148bceb7b5" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Oct 01 15:37:49 crc kubenswrapper[4869]: I1001 15:37:49.617429 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="e93f273e-6567-45a8-b40d-e9148bceb7b5" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Oct 01 15:37:49 crc kubenswrapper[4869]: I1001 15:37:49.620559 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-fdxq4" Oct 01 15:37:49 crc kubenswrapper[4869]: I1001 15:37:49.649861 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-fdxq4"] Oct 01 15:37:49 crc kubenswrapper[4869]: I1001 15:37:49.809651 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6ttbg\" (UniqueName: \"kubernetes.io/projected/9a3e69e2-838b-486d-a93e-35bc2aff571a-kube-api-access-6ttbg\") pod \"community-operators-fdxq4\" (UID: \"9a3e69e2-838b-486d-a93e-35bc2aff571a\") " pod="openshift-marketplace/community-operators-fdxq4" Oct 01 15:37:49 crc kubenswrapper[4869]: I1001 15:37:49.809708 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9a3e69e2-838b-486d-a93e-35bc2aff571a-catalog-content\") pod \"community-operators-fdxq4\" (UID: \"9a3e69e2-838b-486d-a93e-35bc2aff571a\") " pod="openshift-marketplace/community-operators-fdxq4" Oct 01 15:37:49 crc kubenswrapper[4869]: I1001 15:37:49.809744 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9a3e69e2-838b-486d-a93e-35bc2aff571a-utilities\") pod \"community-operators-fdxq4\" (UID: \"9a3e69e2-838b-486d-a93e-35bc2aff571a\") " pod="openshift-marketplace/community-operators-fdxq4" Oct 01 15:37:49 crc kubenswrapper[4869]: I1001 15:37:49.911608 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6ttbg\" (UniqueName: \"kubernetes.io/projected/9a3e69e2-838b-486d-a93e-35bc2aff571a-kube-api-access-6ttbg\") pod \"community-operators-fdxq4\" (UID: \"9a3e69e2-838b-486d-a93e-35bc2aff571a\") " pod="openshift-marketplace/community-operators-fdxq4" Oct 01 15:37:49 crc kubenswrapper[4869]: I1001 15:37:49.914689 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9a3e69e2-838b-486d-a93e-35bc2aff571a-catalog-content\") pod \"community-operators-fdxq4\" (UID: \"9a3e69e2-838b-486d-a93e-35bc2aff571a\") " pod="openshift-marketplace/community-operators-fdxq4" Oct 01 15:37:49 crc kubenswrapper[4869]: I1001 15:37:49.915188 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9a3e69e2-838b-486d-a93e-35bc2aff571a-catalog-content\") pod \"community-operators-fdxq4\" (UID: \"9a3e69e2-838b-486d-a93e-35bc2aff571a\") " pod="openshift-marketplace/community-operators-fdxq4" Oct 01 15:37:49 crc kubenswrapper[4869]: I1001 15:37:49.915398 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9a3e69e2-838b-486d-a93e-35bc2aff571a-utilities\") pod \"community-operators-fdxq4\" (UID: \"9a3e69e2-838b-486d-a93e-35bc2aff571a\") " pod="openshift-marketplace/community-operators-fdxq4" Oct 01 15:37:49 crc kubenswrapper[4869]: I1001 15:37:49.915653 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9a3e69e2-838b-486d-a93e-35bc2aff571a-utilities\") pod \"community-operators-fdxq4\" (UID: \"9a3e69e2-838b-486d-a93e-35bc2aff571a\") " pod="openshift-marketplace/community-operators-fdxq4" Oct 01 15:37:49 crc kubenswrapper[4869]: I1001 15:37:49.935068 4869 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-6ttbg\" (UniqueName: \"kubernetes.io/projected/9a3e69e2-838b-486d-a93e-35bc2aff571a-kube-api-access-6ttbg\") pod \"community-operators-fdxq4\" (UID: \"9a3e69e2-838b-486d-a93e-35bc2aff571a\") " pod="openshift-marketplace/community-operators-fdxq4" Oct 01 15:37:49 crc kubenswrapper[4869]: I1001 15:37:49.955470 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-fdxq4" Oct 01 15:37:50 crc kubenswrapper[4869]: I1001 15:37:50.497124 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-fdxq4"] Oct 01 15:37:51 crc kubenswrapper[4869]: I1001 15:37:51.354426 4869 generic.go:334] "Generic (PLEG): container finished" podID="9a3e69e2-838b-486d-a93e-35bc2aff571a" containerID="23245509889d0a6c9eac9e57221fceda1976605d9296c1173d1754c1ce32b284" exitCode=0 Oct 01 15:37:51 crc kubenswrapper[4869]: I1001 15:37:51.354477 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fdxq4" event={"ID":"9a3e69e2-838b-486d-a93e-35bc2aff571a","Type":"ContainerDied","Data":"23245509889d0a6c9eac9e57221fceda1976605d9296c1173d1754c1ce32b284"} Oct 01 15:37:51 crc kubenswrapper[4869]: I1001 15:37:51.354919 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fdxq4" event={"ID":"9a3e69e2-838b-486d-a93e-35bc2aff571a","Type":"ContainerStarted","Data":"c78b6fb4401c626a7480b69ced47f4c9e20773e3d465f17d9e273bae75ffa973"} Oct 01 15:37:51 crc kubenswrapper[4869]: I1001 15:37:51.359654 4869 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 01 15:37:53 crc kubenswrapper[4869]: I1001 15:37:53.382285 4869 generic.go:334] "Generic (PLEG): container finished" podID="9a3e69e2-838b-486d-a93e-35bc2aff571a" containerID="46f049b495a3acccc9c71a140d9d4f4f7be790baa3966d3ac32a8b0ae5f3342c" exitCode=0 Oct 01 15:37:53 crc kubenswrapper[4869]: I1001 15:37:53.382384 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fdxq4" event={"ID":"9a3e69e2-838b-486d-a93e-35bc2aff571a","Type":"ContainerDied","Data":"46f049b495a3acccc9c71a140d9d4f4f7be790baa3966d3ac32a8b0ae5f3342c"} Oct 01 15:37:54 crc kubenswrapper[4869]: I1001 15:37:54.394520 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fdxq4" event={"ID":"9a3e69e2-838b-486d-a93e-35bc2aff571a","Type":"ContainerStarted","Data":"fddcbc602ab4ccdb461800a78ddbc28e4f89eac69167c5dc8f5df1eecfe85638"} Oct 01 15:37:54 crc kubenswrapper[4869]: I1001 15:37:54.411218 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-fdxq4" podStartSLOduration=2.959996409 podStartE2EDuration="5.411193891s" podCreationTimestamp="2025-10-01 15:37:49 +0000 UTC" firstStartedPulling="2025-10-01 15:37:51.359413868 +0000 UTC m=+1980.506256974" lastFinishedPulling="2025-10-01 15:37:53.81061134 +0000 UTC m=+1982.957454456" observedRunningTime="2025-10-01 15:37:54.408986785 +0000 UTC m=+1983.555829961" watchObservedRunningTime="2025-10-01 15:37:54.411193891 +0000 UTC m=+1983.558037027" Oct 01 15:37:59 crc kubenswrapper[4869]: I1001 15:37:59.348865 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-f8mtp"] Oct 01 15:37:59 crc kubenswrapper[4869]: I1001 15:37:59.368320 4869 kubelet.go:2437] "SyncLoop 
DELETE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-ph2hh"] Oct 01 15:37:59 crc kubenswrapper[4869]: I1001 15:37:59.377377 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-f8mtp"] Oct 01 15:37:59 crc kubenswrapper[4869]: I1001 15:37:59.384117 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-fwpqm"] Oct 01 15:37:59 crc kubenswrapper[4869]: I1001 15:37:59.390750 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-fwpqm"] Oct 01 15:37:59 crc kubenswrapper[4869]: I1001 15:37:59.397686 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-ph2hh"] Oct 01 15:37:59 crc kubenswrapper[4869]: I1001 15:37:59.403794 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xn8rv"] Oct 01 15:37:59 crc kubenswrapper[4869]: I1001 15:37:59.410542 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-68jb4"] Oct 01 15:37:59 crc kubenswrapper[4869]: I1001 15:37:59.417268 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-cdpmf"] Oct 01 15:37:59 crc kubenswrapper[4869]: I1001 15:37:59.422942 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xjgrr"] Oct 01 15:37:59 crc kubenswrapper[4869]: I1001 15:37:59.428111 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-l2t45"] Oct 01 15:37:59 crc kubenswrapper[4869]: I1001 15:37:59.433297 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-qc7bt"] Oct 01 15:37:59 crc kubenswrapper[4869]: I1001 15:37:59.439227 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-cdpmf"] Oct 01 15:37:59 crc kubenswrapper[4869]: I1001 15:37:59.445990 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xjgrr"] Oct 01 15:37:59 crc kubenswrapper[4869]: I1001 15:37:59.452613 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-22x79"] Oct 01 15:37:59 crc kubenswrapper[4869]: I1001 15:37:59.458429 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-68jb4"] Oct 01 15:37:59 crc kubenswrapper[4869]: I1001 15:37:59.464356 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-tlpkx"] Oct 01 15:37:59 crc kubenswrapper[4869]: I1001 15:37:59.471008 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-qc7bt"] Oct 01 15:37:59 crc kubenswrapper[4869]: I1001 15:37:59.477710 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xn8rv"] Oct 01 15:37:59 crc kubenswrapper[4869]: I1001 15:37:59.483622 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-22x79"] Oct 01 15:37:59 crc kubenswrapper[4869]: I1001 15:37:59.490476 4869 
kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-l2t45"] Oct 01 15:37:59 crc kubenswrapper[4869]: I1001 15:37:59.496128 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-tlpkx"] Oct 01 15:37:59 crc kubenswrapper[4869]: I1001 15:37:59.600702 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="12855739-df17-48b6-886b-469e6a39d7f8" path="/var/lib/kubelet/pods/12855739-df17-48b6-886b-469e6a39d7f8/volumes" Oct 01 15:37:59 crc kubenswrapper[4869]: I1001 15:37:59.601478 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="199eb3c4-6ff2-4910-9fe3-51a68f736017" path="/var/lib/kubelet/pods/199eb3c4-6ff2-4910-9fe3-51a68f736017/volumes" Oct 01 15:37:59 crc kubenswrapper[4869]: I1001 15:37:59.602041 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="23e89dd5-daa6-4174-90e3-a9a3a84dde66" path="/var/lib/kubelet/pods/23e89dd5-daa6-4174-90e3-a9a3a84dde66/volumes" Oct 01 15:37:59 crc kubenswrapper[4869]: I1001 15:37:59.602806 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="448c8396-8f33-435c-811e-ec4a295c9759" path="/var/lib/kubelet/pods/448c8396-8f33-435c-811e-ec4a295c9759/volumes" Oct 01 15:37:59 crc kubenswrapper[4869]: I1001 15:37:59.603821 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4741e7d5-5ceb-4699-9d3c-f5798a08af91" path="/var/lib/kubelet/pods/4741e7d5-5ceb-4699-9d3c-f5798a08af91/volumes" Oct 01 15:37:59 crc kubenswrapper[4869]: I1001 15:37:59.604345 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="641c2646-4ed1-4e58-ad8f-4fd0a9ae3d84" path="/var/lib/kubelet/pods/641c2646-4ed1-4e58-ad8f-4fd0a9ae3d84/volumes" Oct 01 15:37:59 crc kubenswrapper[4869]: I1001 15:37:59.604886 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6dc7fd52-a0b5-47e1-9fa7-348ca07979c9" path="/var/lib/kubelet/pods/6dc7fd52-a0b5-47e1-9fa7-348ca07979c9/volumes" Oct 01 15:37:59 crc kubenswrapper[4869]: I1001 15:37:59.605915 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="74968806-6b3a-4d29-a5ed-49987fafba72" path="/var/lib/kubelet/pods/74968806-6b3a-4d29-a5ed-49987fafba72/volumes" Oct 01 15:37:59 crc kubenswrapper[4869]: I1001 15:37:59.606512 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b21a8f1c-ddea-4663-ba39-f2c75c93acec" path="/var/lib/kubelet/pods/b21a8f1c-ddea-4663-ba39-f2c75c93acec/volumes" Oct 01 15:37:59 crc kubenswrapper[4869]: I1001 15:37:59.607182 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e93f273e-6567-45a8-b40d-e9148bceb7b5" path="/var/lib/kubelet/pods/e93f273e-6567-45a8-b40d-e9148bceb7b5/volumes" Oct 01 15:37:59 crc kubenswrapper[4869]: I1001 15:37:59.608153 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e9c07398-7b35-43e1-a67a-1678b23ee63d" path="/var/lib/kubelet/pods/e9c07398-7b35-43e1-a67a-1678b23ee63d/volumes" Oct 01 15:37:59 crc kubenswrapper[4869]: I1001 15:37:59.956694 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-fdxq4" Oct 01 15:37:59 crc kubenswrapper[4869]: I1001 15:37:59.956742 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-fdxq4" Oct 01 15:38:00 crc kubenswrapper[4869]: I1001 15:38:00.016063 4869 kubelet.go:2542] "SyncLoop (probe)" 
probe="startup" status="started" pod="openshift-marketplace/community-operators-fdxq4" Oct 01 15:38:00 crc kubenswrapper[4869]: I1001 15:38:00.515562 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-fdxq4" Oct 01 15:38:00 crc kubenswrapper[4869]: I1001 15:38:00.584397 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-fdxq4"] Oct 01 15:38:02 crc kubenswrapper[4869]: I1001 15:38:02.466875 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-fdxq4" podUID="9a3e69e2-838b-486d-a93e-35bc2aff571a" containerName="registry-server" containerID="cri-o://fddcbc602ab4ccdb461800a78ddbc28e4f89eac69167c5dc8f5df1eecfe85638" gracePeriod=2 Oct 01 15:38:02 crc kubenswrapper[4869]: I1001 15:38:02.925468 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-fdxq4" Oct 01 15:38:03 crc kubenswrapper[4869]: I1001 15:38:03.054917 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ttbg\" (UniqueName: \"kubernetes.io/projected/9a3e69e2-838b-486d-a93e-35bc2aff571a-kube-api-access-6ttbg\") pod \"9a3e69e2-838b-486d-a93e-35bc2aff571a\" (UID: \"9a3e69e2-838b-486d-a93e-35bc2aff571a\") " Oct 01 15:38:03 crc kubenswrapper[4869]: I1001 15:38:03.055421 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9a3e69e2-838b-486d-a93e-35bc2aff571a-catalog-content\") pod \"9a3e69e2-838b-486d-a93e-35bc2aff571a\" (UID: \"9a3e69e2-838b-486d-a93e-35bc2aff571a\") " Oct 01 15:38:03 crc kubenswrapper[4869]: I1001 15:38:03.055507 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9a3e69e2-838b-486d-a93e-35bc2aff571a-utilities\") pod \"9a3e69e2-838b-486d-a93e-35bc2aff571a\" (UID: \"9a3e69e2-838b-486d-a93e-35bc2aff571a\") " Oct 01 15:38:03 crc kubenswrapper[4869]: I1001 15:38:03.056659 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9a3e69e2-838b-486d-a93e-35bc2aff571a-utilities" (OuterVolumeSpecName: "utilities") pod "9a3e69e2-838b-486d-a93e-35bc2aff571a" (UID: "9a3e69e2-838b-486d-a93e-35bc2aff571a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 15:38:03 crc kubenswrapper[4869]: I1001 15:38:03.061098 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9a3e69e2-838b-486d-a93e-35bc2aff571a-kube-api-access-6ttbg" (OuterVolumeSpecName: "kube-api-access-6ttbg") pod "9a3e69e2-838b-486d-a93e-35bc2aff571a" (UID: "9a3e69e2-838b-486d-a93e-35bc2aff571a"). InnerVolumeSpecName "kube-api-access-6ttbg". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:38:03 crc kubenswrapper[4869]: I1001 15:38:03.111870 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9a3e69e2-838b-486d-a93e-35bc2aff571a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9a3e69e2-838b-486d-a93e-35bc2aff571a" (UID: "9a3e69e2-838b-486d-a93e-35bc2aff571a"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 15:38:03 crc kubenswrapper[4869]: I1001 15:38:03.157660 4869 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9a3e69e2-838b-486d-a93e-35bc2aff571a-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 15:38:03 crc kubenswrapper[4869]: I1001 15:38:03.157717 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ttbg\" (UniqueName: \"kubernetes.io/projected/9a3e69e2-838b-486d-a93e-35bc2aff571a-kube-api-access-6ttbg\") on node \"crc\" DevicePath \"\"" Oct 01 15:38:03 crc kubenswrapper[4869]: I1001 15:38:03.157730 4869 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9a3e69e2-838b-486d-a93e-35bc2aff571a-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 15:38:03 crc kubenswrapper[4869]: I1001 15:38:03.477079 4869 generic.go:334] "Generic (PLEG): container finished" podID="9a3e69e2-838b-486d-a93e-35bc2aff571a" containerID="fddcbc602ab4ccdb461800a78ddbc28e4f89eac69167c5dc8f5df1eecfe85638" exitCode=0 Oct 01 15:38:03 crc kubenswrapper[4869]: I1001 15:38:03.477120 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fdxq4" event={"ID":"9a3e69e2-838b-486d-a93e-35bc2aff571a","Type":"ContainerDied","Data":"fddcbc602ab4ccdb461800a78ddbc28e4f89eac69167c5dc8f5df1eecfe85638"} Oct 01 15:38:03 crc kubenswrapper[4869]: I1001 15:38:03.477161 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fdxq4" event={"ID":"9a3e69e2-838b-486d-a93e-35bc2aff571a","Type":"ContainerDied","Data":"c78b6fb4401c626a7480b69ced47f4c9e20773e3d465f17d9e273bae75ffa973"} Oct 01 15:38:03 crc kubenswrapper[4869]: I1001 15:38:03.477160 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-fdxq4" Oct 01 15:38:03 crc kubenswrapper[4869]: I1001 15:38:03.477205 4869 scope.go:117] "RemoveContainer" containerID="fddcbc602ab4ccdb461800a78ddbc28e4f89eac69167c5dc8f5df1eecfe85638" Oct 01 15:38:03 crc kubenswrapper[4869]: I1001 15:38:03.499670 4869 scope.go:117] "RemoveContainer" containerID="46f049b495a3acccc9c71a140d9d4f4f7be790baa3966d3ac32a8b0ae5f3342c" Oct 01 15:38:03 crc kubenswrapper[4869]: I1001 15:38:03.514086 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-fdxq4"] Oct 01 15:38:03 crc kubenswrapper[4869]: I1001 15:38:03.521976 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-fdxq4"] Oct 01 15:38:03 crc kubenswrapper[4869]: I1001 15:38:03.540343 4869 scope.go:117] "RemoveContainer" containerID="23245509889d0a6c9eac9e57221fceda1976605d9296c1173d1754c1ce32b284" Oct 01 15:38:03 crc kubenswrapper[4869]: I1001 15:38:03.564088 4869 scope.go:117] "RemoveContainer" containerID="fddcbc602ab4ccdb461800a78ddbc28e4f89eac69167c5dc8f5df1eecfe85638" Oct 01 15:38:03 crc kubenswrapper[4869]: E1001 15:38:03.564624 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fddcbc602ab4ccdb461800a78ddbc28e4f89eac69167c5dc8f5df1eecfe85638\": container with ID starting with fddcbc602ab4ccdb461800a78ddbc28e4f89eac69167c5dc8f5df1eecfe85638 not found: ID does not exist" containerID="fddcbc602ab4ccdb461800a78ddbc28e4f89eac69167c5dc8f5df1eecfe85638" Oct 01 15:38:03 crc kubenswrapper[4869]: I1001 15:38:03.564702 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fddcbc602ab4ccdb461800a78ddbc28e4f89eac69167c5dc8f5df1eecfe85638"} err="failed to get container status \"fddcbc602ab4ccdb461800a78ddbc28e4f89eac69167c5dc8f5df1eecfe85638\": rpc error: code = NotFound desc = could not find container \"fddcbc602ab4ccdb461800a78ddbc28e4f89eac69167c5dc8f5df1eecfe85638\": container with ID starting with fddcbc602ab4ccdb461800a78ddbc28e4f89eac69167c5dc8f5df1eecfe85638 not found: ID does not exist" Oct 01 15:38:03 crc kubenswrapper[4869]: I1001 15:38:03.564731 4869 scope.go:117] "RemoveContainer" containerID="46f049b495a3acccc9c71a140d9d4f4f7be790baa3966d3ac32a8b0ae5f3342c" Oct 01 15:38:03 crc kubenswrapper[4869]: E1001 15:38:03.565037 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"46f049b495a3acccc9c71a140d9d4f4f7be790baa3966d3ac32a8b0ae5f3342c\": container with ID starting with 46f049b495a3acccc9c71a140d9d4f4f7be790baa3966d3ac32a8b0ae5f3342c not found: ID does not exist" containerID="46f049b495a3acccc9c71a140d9d4f4f7be790baa3966d3ac32a8b0ae5f3342c" Oct 01 15:38:03 crc kubenswrapper[4869]: I1001 15:38:03.565075 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"46f049b495a3acccc9c71a140d9d4f4f7be790baa3966d3ac32a8b0ae5f3342c"} err="failed to get container status \"46f049b495a3acccc9c71a140d9d4f4f7be790baa3966d3ac32a8b0ae5f3342c\": rpc error: code = NotFound desc = could not find container \"46f049b495a3acccc9c71a140d9d4f4f7be790baa3966d3ac32a8b0ae5f3342c\": container with ID starting with 46f049b495a3acccc9c71a140d9d4f4f7be790baa3966d3ac32a8b0ae5f3342c not found: ID does not exist" Oct 01 15:38:03 crc kubenswrapper[4869]: I1001 15:38:03.565102 4869 scope.go:117] "RemoveContainer" 
containerID="23245509889d0a6c9eac9e57221fceda1976605d9296c1173d1754c1ce32b284" Oct 01 15:38:03 crc kubenswrapper[4869]: E1001 15:38:03.565388 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"23245509889d0a6c9eac9e57221fceda1976605d9296c1173d1754c1ce32b284\": container with ID starting with 23245509889d0a6c9eac9e57221fceda1976605d9296c1173d1754c1ce32b284 not found: ID does not exist" containerID="23245509889d0a6c9eac9e57221fceda1976605d9296c1173d1754c1ce32b284" Oct 01 15:38:03 crc kubenswrapper[4869]: I1001 15:38:03.565414 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"23245509889d0a6c9eac9e57221fceda1976605d9296c1173d1754c1ce32b284"} err="failed to get container status \"23245509889d0a6c9eac9e57221fceda1976605d9296c1173d1754c1ce32b284\": rpc error: code = NotFound desc = could not find container \"23245509889d0a6c9eac9e57221fceda1976605d9296c1173d1754c1ce32b284\": container with ID starting with 23245509889d0a6c9eac9e57221fceda1976605d9296c1173d1754c1ce32b284 not found: ID does not exist" Oct 01 15:38:03 crc kubenswrapper[4869]: I1001 15:38:03.591961 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9a3e69e2-838b-486d-a93e-35bc2aff571a" path="/var/lib/kubelet/pods/9a3e69e2-838b-486d-a93e-35bc2aff571a/volumes" Oct 01 15:38:05 crc kubenswrapper[4869]: I1001 15:38:05.658687 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-vgwqs"] Oct 01 15:38:05 crc kubenswrapper[4869]: E1001 15:38:05.659623 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9a3e69e2-838b-486d-a93e-35bc2aff571a" containerName="extract-content" Oct 01 15:38:05 crc kubenswrapper[4869]: I1001 15:38:05.659641 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="9a3e69e2-838b-486d-a93e-35bc2aff571a" containerName="extract-content" Oct 01 15:38:05 crc kubenswrapper[4869]: E1001 15:38:05.659689 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9a3e69e2-838b-486d-a93e-35bc2aff571a" containerName="registry-server" Oct 01 15:38:05 crc kubenswrapper[4869]: I1001 15:38:05.659696 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="9a3e69e2-838b-486d-a93e-35bc2aff571a" containerName="registry-server" Oct 01 15:38:05 crc kubenswrapper[4869]: E1001 15:38:05.659710 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9a3e69e2-838b-486d-a93e-35bc2aff571a" containerName="extract-utilities" Oct 01 15:38:05 crc kubenswrapper[4869]: I1001 15:38:05.659717 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="9a3e69e2-838b-486d-a93e-35bc2aff571a" containerName="extract-utilities" Oct 01 15:38:05 crc kubenswrapper[4869]: I1001 15:38:05.659921 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="9a3e69e2-838b-486d-a93e-35bc2aff571a" containerName="registry-server" Oct 01 15:38:05 crc kubenswrapper[4869]: I1001 15:38:05.661395 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-vgwqs" Oct 01 15:38:05 crc kubenswrapper[4869]: I1001 15:38:05.693987 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-vgwqs"] Oct 01 15:38:05 crc kubenswrapper[4869]: I1001 15:38:05.806224 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d6f8ac01-0c9a-44d0-91e3-363811338874-catalog-content\") pod \"certified-operators-vgwqs\" (UID: \"d6f8ac01-0c9a-44d0-91e3-363811338874\") " pod="openshift-marketplace/certified-operators-vgwqs" Oct 01 15:38:05 crc kubenswrapper[4869]: I1001 15:38:05.806515 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pfz4q\" (UniqueName: \"kubernetes.io/projected/d6f8ac01-0c9a-44d0-91e3-363811338874-kube-api-access-pfz4q\") pod \"certified-operators-vgwqs\" (UID: \"d6f8ac01-0c9a-44d0-91e3-363811338874\") " pod="openshift-marketplace/certified-operators-vgwqs" Oct 01 15:38:05 crc kubenswrapper[4869]: I1001 15:38:05.806718 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d6f8ac01-0c9a-44d0-91e3-363811338874-utilities\") pod \"certified-operators-vgwqs\" (UID: \"d6f8ac01-0c9a-44d0-91e3-363811338874\") " pod="openshift-marketplace/certified-operators-vgwqs" Oct 01 15:38:05 crc kubenswrapper[4869]: I1001 15:38:05.909192 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d6f8ac01-0c9a-44d0-91e3-363811338874-utilities\") pod \"certified-operators-vgwqs\" (UID: \"d6f8ac01-0c9a-44d0-91e3-363811338874\") " pod="openshift-marketplace/certified-operators-vgwqs" Oct 01 15:38:05 crc kubenswrapper[4869]: I1001 15:38:05.909404 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d6f8ac01-0c9a-44d0-91e3-363811338874-catalog-content\") pod \"certified-operators-vgwqs\" (UID: \"d6f8ac01-0c9a-44d0-91e3-363811338874\") " pod="openshift-marketplace/certified-operators-vgwqs" Oct 01 15:38:05 crc kubenswrapper[4869]: I1001 15:38:05.909460 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pfz4q\" (UniqueName: \"kubernetes.io/projected/d6f8ac01-0c9a-44d0-91e3-363811338874-kube-api-access-pfz4q\") pod \"certified-operators-vgwqs\" (UID: \"d6f8ac01-0c9a-44d0-91e3-363811338874\") " pod="openshift-marketplace/certified-operators-vgwqs" Oct 01 15:38:05 crc kubenswrapper[4869]: I1001 15:38:05.909712 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d6f8ac01-0c9a-44d0-91e3-363811338874-utilities\") pod \"certified-operators-vgwqs\" (UID: \"d6f8ac01-0c9a-44d0-91e3-363811338874\") " pod="openshift-marketplace/certified-operators-vgwqs" Oct 01 15:38:05 crc kubenswrapper[4869]: I1001 15:38:05.909999 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d6f8ac01-0c9a-44d0-91e3-363811338874-catalog-content\") pod \"certified-operators-vgwqs\" (UID: \"d6f8ac01-0c9a-44d0-91e3-363811338874\") " pod="openshift-marketplace/certified-operators-vgwqs" Oct 01 15:38:05 crc kubenswrapper[4869]: I1001 15:38:05.936299 4869 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-pfz4q\" (UniqueName: \"kubernetes.io/projected/d6f8ac01-0c9a-44d0-91e3-363811338874-kube-api-access-pfz4q\") pod \"certified-operators-vgwqs\" (UID: \"d6f8ac01-0c9a-44d0-91e3-363811338874\") " pod="openshift-marketplace/certified-operators-vgwqs" Oct 01 15:38:06 crc kubenswrapper[4869]: I1001 15:38:06.003192 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-vgwqs" Oct 01 15:38:06 crc kubenswrapper[4869]: I1001 15:38:06.099691 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-qz9pw"] Oct 01 15:38:06 crc kubenswrapper[4869]: I1001 15:38:06.107697 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-qz9pw" Oct 01 15:38:06 crc kubenswrapper[4869]: I1001 15:38:06.109801 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 01 15:38:06 crc kubenswrapper[4869]: I1001 15:38:06.110199 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 01 15:38:06 crc kubenswrapper[4869]: I1001 15:38:06.110246 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 01 15:38:06 crc kubenswrapper[4869]: I1001 15:38:06.110610 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Oct 01 15:38:06 crc kubenswrapper[4869]: I1001 15:38:06.111531 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-cjg8g" Oct 01 15:38:06 crc kubenswrapper[4869]: I1001 15:38:06.113056 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-qz9pw"] Oct 01 15:38:06 crc kubenswrapper[4869]: I1001 15:38:06.214086 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bbxpf\" (UniqueName: \"kubernetes.io/projected/b943448c-de46-41bc-a516-1364799a4eba-kube-api-access-bbxpf\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-qz9pw\" (UID: \"b943448c-de46-41bc-a516-1364799a4eba\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-qz9pw" Oct 01 15:38:06 crc kubenswrapper[4869]: I1001 15:38:06.214143 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/b943448c-de46-41bc-a516-1364799a4eba-ceph\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-qz9pw\" (UID: \"b943448c-de46-41bc-a516-1364799a4eba\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-qz9pw" Oct 01 15:38:06 crc kubenswrapper[4869]: I1001 15:38:06.215028 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b943448c-de46-41bc-a516-1364799a4eba-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-qz9pw\" (UID: \"b943448c-de46-41bc-a516-1364799a4eba\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-qz9pw" Oct 01 15:38:06 crc kubenswrapper[4869]: I1001 15:38:06.215104 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/b943448c-de46-41bc-a516-1364799a4eba-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-qz9pw\" (UID: \"b943448c-de46-41bc-a516-1364799a4eba\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-qz9pw" Oct 01 15:38:06 crc kubenswrapper[4869]: I1001 15:38:06.215230 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b943448c-de46-41bc-a516-1364799a4eba-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-qz9pw\" (UID: \"b943448c-de46-41bc-a516-1364799a4eba\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-qz9pw" Oct 01 15:38:06 crc kubenswrapper[4869]: I1001 15:38:06.318748 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b943448c-de46-41bc-a516-1364799a4eba-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-qz9pw\" (UID: \"b943448c-de46-41bc-a516-1364799a4eba\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-qz9pw" Oct 01 15:38:06 crc kubenswrapper[4869]: I1001 15:38:06.318847 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b943448c-de46-41bc-a516-1364799a4eba-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-qz9pw\" (UID: \"b943448c-de46-41bc-a516-1364799a4eba\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-qz9pw" Oct 01 15:38:06 crc kubenswrapper[4869]: I1001 15:38:06.318956 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b943448c-de46-41bc-a516-1364799a4eba-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-qz9pw\" (UID: \"b943448c-de46-41bc-a516-1364799a4eba\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-qz9pw" Oct 01 15:38:06 crc kubenswrapper[4869]: I1001 15:38:06.318985 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bbxpf\" (UniqueName: \"kubernetes.io/projected/b943448c-de46-41bc-a516-1364799a4eba-kube-api-access-bbxpf\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-qz9pw\" (UID: \"b943448c-de46-41bc-a516-1364799a4eba\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-qz9pw" Oct 01 15:38:06 crc kubenswrapper[4869]: I1001 15:38:06.319019 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/b943448c-de46-41bc-a516-1364799a4eba-ceph\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-qz9pw\" (UID: \"b943448c-de46-41bc-a516-1364799a4eba\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-qz9pw" Oct 01 15:38:06 crc kubenswrapper[4869]: I1001 15:38:06.325955 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b943448c-de46-41bc-a516-1364799a4eba-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-qz9pw\" (UID: \"b943448c-de46-41bc-a516-1364799a4eba\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-qz9pw" Oct 01 15:38:06 crc kubenswrapper[4869]: I1001 15:38:06.326053 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b943448c-de46-41bc-a516-1364799a4eba-ssh-key\") pod 
\"repo-setup-edpm-deployment-openstack-edpm-ipam-qz9pw\" (UID: \"b943448c-de46-41bc-a516-1364799a4eba\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-qz9pw" Oct 01 15:38:06 crc kubenswrapper[4869]: I1001 15:38:06.327805 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/b943448c-de46-41bc-a516-1364799a4eba-ceph\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-qz9pw\" (UID: \"b943448c-de46-41bc-a516-1364799a4eba\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-qz9pw" Oct 01 15:38:06 crc kubenswrapper[4869]: I1001 15:38:06.334184 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b943448c-de46-41bc-a516-1364799a4eba-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-qz9pw\" (UID: \"b943448c-de46-41bc-a516-1364799a4eba\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-qz9pw" Oct 01 15:38:06 crc kubenswrapper[4869]: I1001 15:38:06.338068 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bbxpf\" (UniqueName: \"kubernetes.io/projected/b943448c-de46-41bc-a516-1364799a4eba-kube-api-access-bbxpf\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-qz9pw\" (UID: \"b943448c-de46-41bc-a516-1364799a4eba\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-qz9pw" Oct 01 15:38:06 crc kubenswrapper[4869]: I1001 15:38:06.455621 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-qz9pw" Oct 01 15:38:06 crc kubenswrapper[4869]: I1001 15:38:06.534185 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-vgwqs"] Oct 01 15:38:06 crc kubenswrapper[4869]: I1001 15:38:06.955974 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-qz9pw"] Oct 01 15:38:06 crc kubenswrapper[4869]: W1001 15:38:06.961632 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb943448c_de46_41bc_a516_1364799a4eba.slice/crio-5587a025a6b2daa589d895004c5cd5cbf214d6a0d17a27e8d3b39f9c4fe3da3b WatchSource:0}: Error finding container 5587a025a6b2daa589d895004c5cd5cbf214d6a0d17a27e8d3b39f9c4fe3da3b: Status 404 returned error can't find the container with id 5587a025a6b2daa589d895004c5cd5cbf214d6a0d17a27e8d3b39f9c4fe3da3b Oct 01 15:38:07 crc kubenswrapper[4869]: I1001 15:38:07.511075 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-qz9pw" event={"ID":"b943448c-de46-41bc-a516-1364799a4eba","Type":"ContainerStarted","Data":"5587a025a6b2daa589d895004c5cd5cbf214d6a0d17a27e8d3b39f9c4fe3da3b"} Oct 01 15:38:07 crc kubenswrapper[4869]: I1001 15:38:07.512856 4869 generic.go:334] "Generic (PLEG): container finished" podID="d6f8ac01-0c9a-44d0-91e3-363811338874" containerID="af3684113b39f936ca708bb80b28b2775bbce3cf4268b90d4b9fa52313bc1aeb" exitCode=0 Oct 01 15:38:07 crc kubenswrapper[4869]: I1001 15:38:07.512887 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vgwqs" event={"ID":"d6f8ac01-0c9a-44d0-91e3-363811338874","Type":"ContainerDied","Data":"af3684113b39f936ca708bb80b28b2775bbce3cf4268b90d4b9fa52313bc1aeb"} Oct 01 15:38:07 crc kubenswrapper[4869]: I1001 
15:38:07.512904 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vgwqs" event={"ID":"d6f8ac01-0c9a-44d0-91e3-363811338874","Type":"ContainerStarted","Data":"297a4bc79884a9b21f664f100451e54c7d42ce90dbd8a98ca2b77e5896c60680"} Oct 01 15:38:08 crc kubenswrapper[4869]: I1001 15:38:08.523642 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-qz9pw" event={"ID":"b943448c-de46-41bc-a516-1364799a4eba","Type":"ContainerStarted","Data":"2d6ab5427dcb6b8a2447e35caa8b4312ab996df97ec6af0a030c59b0c1835ddf"} Oct 01 15:38:08 crc kubenswrapper[4869]: I1001 15:38:08.526978 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vgwqs" event={"ID":"d6f8ac01-0c9a-44d0-91e3-363811338874","Type":"ContainerStarted","Data":"238a591a318d95f4f5bd53a68584642ee89947f4f3c6ac1ed1b2e70aaf91417c"} Oct 01 15:38:08 crc kubenswrapper[4869]: I1001 15:38:08.546492 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-qz9pw" podStartSLOduration=2.091703454 podStartE2EDuration="2.546473682s" podCreationTimestamp="2025-10-01 15:38:06 +0000 UTC" firstStartedPulling="2025-10-01 15:38:06.964002748 +0000 UTC m=+1996.110845864" lastFinishedPulling="2025-10-01 15:38:07.418772976 +0000 UTC m=+1996.565616092" observedRunningTime="2025-10-01 15:38:08.545769464 +0000 UTC m=+1997.692612600" watchObservedRunningTime="2025-10-01 15:38:08.546473682 +0000 UTC m=+1997.693316798" Oct 01 15:38:09 crc kubenswrapper[4869]: I1001 15:38:09.552709 4869 generic.go:334] "Generic (PLEG): container finished" podID="d6f8ac01-0c9a-44d0-91e3-363811338874" containerID="238a591a318d95f4f5bd53a68584642ee89947f4f3c6ac1ed1b2e70aaf91417c" exitCode=0 Oct 01 15:38:09 crc kubenswrapper[4869]: I1001 15:38:09.553411 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vgwqs" event={"ID":"d6f8ac01-0c9a-44d0-91e3-363811338874","Type":"ContainerDied","Data":"238a591a318d95f4f5bd53a68584642ee89947f4f3c6ac1ed1b2e70aaf91417c"} Oct 01 15:38:10 crc kubenswrapper[4869]: I1001 15:38:10.069189 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-mhntb"] Oct 01 15:38:10 crc kubenswrapper[4869]: I1001 15:38:10.071844 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mhntb" Oct 01 15:38:10 crc kubenswrapper[4869]: I1001 15:38:10.081795 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-mhntb"] Oct 01 15:38:10 crc kubenswrapper[4869]: I1001 15:38:10.188042 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4c293cc8-e2fd-494e-beae-c1c7a97d8504-catalog-content\") pod \"redhat-marketplace-mhntb\" (UID: \"4c293cc8-e2fd-494e-beae-c1c7a97d8504\") " pod="openshift-marketplace/redhat-marketplace-mhntb" Oct 01 15:38:10 crc kubenswrapper[4869]: I1001 15:38:10.189151 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4c293cc8-e2fd-494e-beae-c1c7a97d8504-utilities\") pod \"redhat-marketplace-mhntb\" (UID: \"4c293cc8-e2fd-494e-beae-c1c7a97d8504\") " pod="openshift-marketplace/redhat-marketplace-mhntb" Oct 01 15:38:10 crc kubenswrapper[4869]: I1001 15:38:10.189198 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wbwhv\" (UniqueName: \"kubernetes.io/projected/4c293cc8-e2fd-494e-beae-c1c7a97d8504-kube-api-access-wbwhv\") pod \"redhat-marketplace-mhntb\" (UID: \"4c293cc8-e2fd-494e-beae-c1c7a97d8504\") " pod="openshift-marketplace/redhat-marketplace-mhntb" Oct 01 15:38:10 crc kubenswrapper[4869]: I1001 15:38:10.290551 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4c293cc8-e2fd-494e-beae-c1c7a97d8504-utilities\") pod \"redhat-marketplace-mhntb\" (UID: \"4c293cc8-e2fd-494e-beae-c1c7a97d8504\") " pod="openshift-marketplace/redhat-marketplace-mhntb" Oct 01 15:38:10 crc kubenswrapper[4869]: I1001 15:38:10.290909 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wbwhv\" (UniqueName: \"kubernetes.io/projected/4c293cc8-e2fd-494e-beae-c1c7a97d8504-kube-api-access-wbwhv\") pod \"redhat-marketplace-mhntb\" (UID: \"4c293cc8-e2fd-494e-beae-c1c7a97d8504\") " pod="openshift-marketplace/redhat-marketplace-mhntb" Oct 01 15:38:10 crc kubenswrapper[4869]: I1001 15:38:10.290999 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4c293cc8-e2fd-494e-beae-c1c7a97d8504-catalog-content\") pod \"redhat-marketplace-mhntb\" (UID: \"4c293cc8-e2fd-494e-beae-c1c7a97d8504\") " pod="openshift-marketplace/redhat-marketplace-mhntb" Oct 01 15:38:10 crc kubenswrapper[4869]: I1001 15:38:10.290996 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4c293cc8-e2fd-494e-beae-c1c7a97d8504-utilities\") pod \"redhat-marketplace-mhntb\" (UID: \"4c293cc8-e2fd-494e-beae-c1c7a97d8504\") " pod="openshift-marketplace/redhat-marketplace-mhntb" Oct 01 15:38:10 crc kubenswrapper[4869]: I1001 15:38:10.291226 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4c293cc8-e2fd-494e-beae-c1c7a97d8504-catalog-content\") pod \"redhat-marketplace-mhntb\" (UID: \"4c293cc8-e2fd-494e-beae-c1c7a97d8504\") " pod="openshift-marketplace/redhat-marketplace-mhntb" Oct 01 15:38:10 crc kubenswrapper[4869]: I1001 15:38:10.318395 4869 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-wbwhv\" (UniqueName: \"kubernetes.io/projected/4c293cc8-e2fd-494e-beae-c1c7a97d8504-kube-api-access-wbwhv\") pod \"redhat-marketplace-mhntb\" (UID: \"4c293cc8-e2fd-494e-beae-c1c7a97d8504\") " pod="openshift-marketplace/redhat-marketplace-mhntb" Oct 01 15:38:10 crc kubenswrapper[4869]: I1001 15:38:10.399901 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mhntb" Oct 01 15:38:10 crc kubenswrapper[4869]: I1001 15:38:10.572370 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vgwqs" event={"ID":"d6f8ac01-0c9a-44d0-91e3-363811338874","Type":"ContainerStarted","Data":"4edf59ee9b2b705d83039d1e1fec57e014bd304f6b89decf1b44b847cb2769fd"} Oct 01 15:38:10 crc kubenswrapper[4869]: I1001 15:38:10.601937 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-vgwqs" podStartSLOduration=3.0788215 podStartE2EDuration="5.60191921s" podCreationTimestamp="2025-10-01 15:38:05 +0000 UTC" firstStartedPulling="2025-10-01 15:38:07.515865685 +0000 UTC m=+1996.662708801" lastFinishedPulling="2025-10-01 15:38:10.038963385 +0000 UTC m=+1999.185806511" observedRunningTime="2025-10-01 15:38:10.595756165 +0000 UTC m=+1999.742599281" watchObservedRunningTime="2025-10-01 15:38:10.60191921 +0000 UTC m=+1999.748762326" Oct 01 15:38:10 crc kubenswrapper[4869]: I1001 15:38:10.869700 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-mhntb"] Oct 01 15:38:11 crc kubenswrapper[4869]: I1001 15:38:11.582116 4869 generic.go:334] "Generic (PLEG): container finished" podID="4c293cc8-e2fd-494e-beae-c1c7a97d8504" containerID="57646a12cc01e31973685a2e1a1eb897b08c941c98554c080b34f0e8844004d1" exitCode=0 Oct 01 15:38:11 crc kubenswrapper[4869]: I1001 15:38:11.597102 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mhntb" event={"ID":"4c293cc8-e2fd-494e-beae-c1c7a97d8504","Type":"ContainerDied","Data":"57646a12cc01e31973685a2e1a1eb897b08c941c98554c080b34f0e8844004d1"} Oct 01 15:38:11 crc kubenswrapper[4869]: I1001 15:38:11.597163 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mhntb" event={"ID":"4c293cc8-e2fd-494e-beae-c1c7a97d8504","Type":"ContainerStarted","Data":"0deed5194011916ebaae63af28a4924204b9bf29e2451313ca699c08702230f0"} Oct 01 15:38:12 crc kubenswrapper[4869]: I1001 15:38:12.594655 4869 generic.go:334] "Generic (PLEG): container finished" podID="4c293cc8-e2fd-494e-beae-c1c7a97d8504" containerID="b6bd2a71cba44167b5c55c048dfb2ab250c541ea5aababdf2560acc4f10e0812" exitCode=0 Oct 01 15:38:12 crc kubenswrapper[4869]: I1001 15:38:12.594722 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mhntb" event={"ID":"4c293cc8-e2fd-494e-beae-c1c7a97d8504","Type":"ContainerDied","Data":"b6bd2a71cba44167b5c55c048dfb2ab250c541ea5aababdf2560acc4f10e0812"} Oct 01 15:38:13 crc kubenswrapper[4869]: I1001 15:38:13.605573 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mhntb" event={"ID":"4c293cc8-e2fd-494e-beae-c1c7a97d8504","Type":"ContainerStarted","Data":"d8837138e7dc3a0f274dfb5eb6d23a90c4ace6c91b1143ffdb2f5d591d70a447"} Oct 01 15:38:13 crc kubenswrapper[4869]: I1001 15:38:13.626635 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-marketplace/redhat-marketplace-mhntb" podStartSLOduration=2.019743606 podStartE2EDuration="3.626615243s" podCreationTimestamp="2025-10-01 15:38:10 +0000 UTC" firstStartedPulling="2025-10-01 15:38:11.594037249 +0000 UTC m=+2000.740880375" lastFinishedPulling="2025-10-01 15:38:13.200908896 +0000 UTC m=+2002.347752012" observedRunningTime="2025-10-01 15:38:13.623634598 +0000 UTC m=+2002.770477724" watchObservedRunningTime="2025-10-01 15:38:13.626615243 +0000 UTC m=+2002.773458369" Oct 01 15:38:13 crc kubenswrapper[4869]: I1001 15:38:13.911006 4869 scope.go:117] "RemoveContainer" containerID="0644d998c969fb3bed012dc7a8f2ad9a8e87fcc549ee146354f20323a948af9f" Oct 01 15:38:13 crc kubenswrapper[4869]: I1001 15:38:13.968510 4869 scope.go:117] "RemoveContainer" containerID="40f587a480a7c39a6196a0a54aab2ff69baf31668f21bcaffb8dff340cdd4d67" Oct 01 15:38:14 crc kubenswrapper[4869]: I1001 15:38:14.005886 4869 scope.go:117] "RemoveContainer" containerID="8561e99de471e1faf68cc4aed08fee80f54220362b813c439a91c5b7ae5e8348" Oct 01 15:38:14 crc kubenswrapper[4869]: I1001 15:38:14.048323 4869 scope.go:117] "RemoveContainer" containerID="cb1689f9cd035cfe3c04150cfdc86bfa5eae7952cfd102d67886467a32dcd6ae" Oct 01 15:38:14 crc kubenswrapper[4869]: I1001 15:38:14.118456 4869 scope.go:117] "RemoveContainer" containerID="a4c30e7649e1d7e4e4b388df040c08752693c97867f649c3a99c026e12375a3a" Oct 01 15:38:14 crc kubenswrapper[4869]: I1001 15:38:14.162800 4869 scope.go:117] "RemoveContainer" containerID="01553e2b01891ddc71d4b8a1179cef6c85eeb8220e018db463ba9b08d73df080" Oct 01 15:38:14 crc kubenswrapper[4869]: I1001 15:38:14.441388 4869 scope.go:117] "RemoveContainer" containerID="a4b63cb3162da03b91af5d7cae161e2c3e36b6c7ff7c72dd1638a5e775113df3" Oct 01 15:38:14 crc kubenswrapper[4869]: I1001 15:38:14.477639 4869 scope.go:117] "RemoveContainer" containerID="7cf48ea29ff8f9ec58969e4312cb5b3e8ce8120ddd5c2843d36031b5c1942727" Oct 01 15:38:14 crc kubenswrapper[4869]: I1001 15:38:14.528214 4869 scope.go:117] "RemoveContainer" containerID="d76594c69089b736be60e31e1f3b79b8a758908c6a92b664ee6ebe187d739276" Oct 01 15:38:14 crc kubenswrapper[4869]: I1001 15:38:14.559473 4869 scope.go:117] "RemoveContainer" containerID="1ec0c0c6e1592364c174b2f0559ef58b726ce12d4b566f8be96b2cdb9253fdd1" Oct 01 15:38:16 crc kubenswrapper[4869]: I1001 15:38:16.003888 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-vgwqs" Oct 01 15:38:16 crc kubenswrapper[4869]: I1001 15:38:16.005170 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-vgwqs" Oct 01 15:38:16 crc kubenswrapper[4869]: I1001 15:38:16.077044 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-vgwqs" Oct 01 15:38:16 crc kubenswrapper[4869]: I1001 15:38:16.719036 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-vgwqs" Oct 01 15:38:17 crc kubenswrapper[4869]: I1001 15:38:17.051398 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-vgwqs"] Oct 01 15:38:18 crc kubenswrapper[4869]: I1001 15:38:18.665255 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-vgwqs" podUID="d6f8ac01-0c9a-44d0-91e3-363811338874" containerName="registry-server" 
containerID="cri-o://4edf59ee9b2b705d83039d1e1fec57e014bd304f6b89decf1b44b847cb2769fd" gracePeriod=2 Oct 01 15:38:19 crc kubenswrapper[4869]: I1001 15:38:19.178623 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-vgwqs" Oct 01 15:38:19 crc kubenswrapper[4869]: I1001 15:38:19.351573 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pfz4q\" (UniqueName: \"kubernetes.io/projected/d6f8ac01-0c9a-44d0-91e3-363811338874-kube-api-access-pfz4q\") pod \"d6f8ac01-0c9a-44d0-91e3-363811338874\" (UID: \"d6f8ac01-0c9a-44d0-91e3-363811338874\") " Oct 01 15:38:19 crc kubenswrapper[4869]: I1001 15:38:19.351721 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d6f8ac01-0c9a-44d0-91e3-363811338874-utilities\") pod \"d6f8ac01-0c9a-44d0-91e3-363811338874\" (UID: \"d6f8ac01-0c9a-44d0-91e3-363811338874\") " Oct 01 15:38:19 crc kubenswrapper[4869]: I1001 15:38:19.351768 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d6f8ac01-0c9a-44d0-91e3-363811338874-catalog-content\") pod \"d6f8ac01-0c9a-44d0-91e3-363811338874\" (UID: \"d6f8ac01-0c9a-44d0-91e3-363811338874\") " Oct 01 15:38:19 crc kubenswrapper[4869]: I1001 15:38:19.353076 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d6f8ac01-0c9a-44d0-91e3-363811338874-utilities" (OuterVolumeSpecName: "utilities") pod "d6f8ac01-0c9a-44d0-91e3-363811338874" (UID: "d6f8ac01-0c9a-44d0-91e3-363811338874"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 15:38:19 crc kubenswrapper[4869]: I1001 15:38:19.358424 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d6f8ac01-0c9a-44d0-91e3-363811338874-kube-api-access-pfz4q" (OuterVolumeSpecName: "kube-api-access-pfz4q") pod "d6f8ac01-0c9a-44d0-91e3-363811338874" (UID: "d6f8ac01-0c9a-44d0-91e3-363811338874"). InnerVolumeSpecName "kube-api-access-pfz4q". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:38:19 crc kubenswrapper[4869]: I1001 15:38:19.401287 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d6f8ac01-0c9a-44d0-91e3-363811338874-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d6f8ac01-0c9a-44d0-91e3-363811338874" (UID: "d6f8ac01-0c9a-44d0-91e3-363811338874"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 15:38:19 crc kubenswrapper[4869]: I1001 15:38:19.453748 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pfz4q\" (UniqueName: \"kubernetes.io/projected/d6f8ac01-0c9a-44d0-91e3-363811338874-kube-api-access-pfz4q\") on node \"crc\" DevicePath \"\"" Oct 01 15:38:19 crc kubenswrapper[4869]: I1001 15:38:19.453786 4869 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d6f8ac01-0c9a-44d0-91e3-363811338874-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 15:38:19 crc kubenswrapper[4869]: I1001 15:38:19.453796 4869 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d6f8ac01-0c9a-44d0-91e3-363811338874-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 15:38:19 crc kubenswrapper[4869]: I1001 15:38:19.674729 4869 generic.go:334] "Generic (PLEG): container finished" podID="d6f8ac01-0c9a-44d0-91e3-363811338874" containerID="4edf59ee9b2b705d83039d1e1fec57e014bd304f6b89decf1b44b847cb2769fd" exitCode=0 Oct 01 15:38:19 crc kubenswrapper[4869]: I1001 15:38:19.674778 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-vgwqs" Oct 01 15:38:19 crc kubenswrapper[4869]: I1001 15:38:19.674793 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vgwqs" event={"ID":"d6f8ac01-0c9a-44d0-91e3-363811338874","Type":"ContainerDied","Data":"4edf59ee9b2b705d83039d1e1fec57e014bd304f6b89decf1b44b847cb2769fd"} Oct 01 15:38:19 crc kubenswrapper[4869]: I1001 15:38:19.674836 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vgwqs" event={"ID":"d6f8ac01-0c9a-44d0-91e3-363811338874","Type":"ContainerDied","Data":"297a4bc79884a9b21f664f100451e54c7d42ce90dbd8a98ca2b77e5896c60680"} Oct 01 15:38:19 crc kubenswrapper[4869]: I1001 15:38:19.674860 4869 scope.go:117] "RemoveContainer" containerID="4edf59ee9b2b705d83039d1e1fec57e014bd304f6b89decf1b44b847cb2769fd" Oct 01 15:38:19 crc kubenswrapper[4869]: I1001 15:38:19.696803 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-vgwqs"] Oct 01 15:38:19 crc kubenswrapper[4869]: I1001 15:38:19.698849 4869 scope.go:117] "RemoveContainer" containerID="238a591a318d95f4f5bd53a68584642ee89947f4f3c6ac1ed1b2e70aaf91417c" Oct 01 15:38:19 crc kubenswrapper[4869]: I1001 15:38:19.705189 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-vgwqs"] Oct 01 15:38:19 crc kubenswrapper[4869]: I1001 15:38:19.719670 4869 scope.go:117] "RemoveContainer" containerID="af3684113b39f936ca708bb80b28b2775bbce3cf4268b90d4b9fa52313bc1aeb" Oct 01 15:38:19 crc kubenswrapper[4869]: I1001 15:38:19.763517 4869 scope.go:117] "RemoveContainer" containerID="4edf59ee9b2b705d83039d1e1fec57e014bd304f6b89decf1b44b847cb2769fd" Oct 01 15:38:19 crc kubenswrapper[4869]: E1001 15:38:19.763897 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4edf59ee9b2b705d83039d1e1fec57e014bd304f6b89decf1b44b847cb2769fd\": container with ID starting with 4edf59ee9b2b705d83039d1e1fec57e014bd304f6b89decf1b44b847cb2769fd not found: ID does not exist" containerID="4edf59ee9b2b705d83039d1e1fec57e014bd304f6b89decf1b44b847cb2769fd" Oct 01 15:38:19 crc kubenswrapper[4869]: I1001 15:38:19.763928 
4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4edf59ee9b2b705d83039d1e1fec57e014bd304f6b89decf1b44b847cb2769fd"} err="failed to get container status \"4edf59ee9b2b705d83039d1e1fec57e014bd304f6b89decf1b44b847cb2769fd\": rpc error: code = NotFound desc = could not find container \"4edf59ee9b2b705d83039d1e1fec57e014bd304f6b89decf1b44b847cb2769fd\": container with ID starting with 4edf59ee9b2b705d83039d1e1fec57e014bd304f6b89decf1b44b847cb2769fd not found: ID does not exist" Oct 01 15:38:19 crc kubenswrapper[4869]: I1001 15:38:19.763956 4869 scope.go:117] "RemoveContainer" containerID="238a591a318d95f4f5bd53a68584642ee89947f4f3c6ac1ed1b2e70aaf91417c" Oct 01 15:38:19 crc kubenswrapper[4869]: E1001 15:38:19.764293 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"238a591a318d95f4f5bd53a68584642ee89947f4f3c6ac1ed1b2e70aaf91417c\": container with ID starting with 238a591a318d95f4f5bd53a68584642ee89947f4f3c6ac1ed1b2e70aaf91417c not found: ID does not exist" containerID="238a591a318d95f4f5bd53a68584642ee89947f4f3c6ac1ed1b2e70aaf91417c" Oct 01 15:38:19 crc kubenswrapper[4869]: I1001 15:38:19.764324 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"238a591a318d95f4f5bd53a68584642ee89947f4f3c6ac1ed1b2e70aaf91417c"} err="failed to get container status \"238a591a318d95f4f5bd53a68584642ee89947f4f3c6ac1ed1b2e70aaf91417c\": rpc error: code = NotFound desc = could not find container \"238a591a318d95f4f5bd53a68584642ee89947f4f3c6ac1ed1b2e70aaf91417c\": container with ID starting with 238a591a318d95f4f5bd53a68584642ee89947f4f3c6ac1ed1b2e70aaf91417c not found: ID does not exist" Oct 01 15:38:19 crc kubenswrapper[4869]: I1001 15:38:19.764341 4869 scope.go:117] "RemoveContainer" containerID="af3684113b39f936ca708bb80b28b2775bbce3cf4268b90d4b9fa52313bc1aeb" Oct 01 15:38:19 crc kubenswrapper[4869]: E1001 15:38:19.764771 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"af3684113b39f936ca708bb80b28b2775bbce3cf4268b90d4b9fa52313bc1aeb\": container with ID starting with af3684113b39f936ca708bb80b28b2775bbce3cf4268b90d4b9fa52313bc1aeb not found: ID does not exist" containerID="af3684113b39f936ca708bb80b28b2775bbce3cf4268b90d4b9fa52313bc1aeb" Oct 01 15:38:19 crc kubenswrapper[4869]: I1001 15:38:19.764819 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"af3684113b39f936ca708bb80b28b2775bbce3cf4268b90d4b9fa52313bc1aeb"} err="failed to get container status \"af3684113b39f936ca708bb80b28b2775bbce3cf4268b90d4b9fa52313bc1aeb\": rpc error: code = NotFound desc = could not find container \"af3684113b39f936ca708bb80b28b2775bbce3cf4268b90d4b9fa52313bc1aeb\": container with ID starting with af3684113b39f936ca708bb80b28b2775bbce3cf4268b90d4b9fa52313bc1aeb not found: ID does not exist" Oct 01 15:38:20 crc kubenswrapper[4869]: I1001 15:38:20.400587 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-mhntb" Oct 01 15:38:20 crc kubenswrapper[4869]: I1001 15:38:20.400969 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-mhntb" Oct 01 15:38:20 crc kubenswrapper[4869]: I1001 15:38:20.482119 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openshift-marketplace/redhat-marketplace-mhntb" Oct 01 15:38:20 crc kubenswrapper[4869]: I1001 15:38:20.686428 4869 generic.go:334] "Generic (PLEG): container finished" podID="b943448c-de46-41bc-a516-1364799a4eba" containerID="2d6ab5427dcb6b8a2447e35caa8b4312ab996df97ec6af0a030c59b0c1835ddf" exitCode=0 Oct 01 15:38:20 crc kubenswrapper[4869]: I1001 15:38:20.686502 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-qz9pw" event={"ID":"b943448c-de46-41bc-a516-1364799a4eba","Type":"ContainerDied","Data":"2d6ab5427dcb6b8a2447e35caa8b4312ab996df97ec6af0a030c59b0c1835ddf"} Oct 01 15:38:20 crc kubenswrapper[4869]: I1001 15:38:20.738339 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-mhntb" Oct 01 15:38:21 crc kubenswrapper[4869]: I1001 15:38:21.600584 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d6f8ac01-0c9a-44d0-91e3-363811338874" path="/var/lib/kubelet/pods/d6f8ac01-0c9a-44d0-91e3-363811338874/volumes" Oct 01 15:38:22 crc kubenswrapper[4869]: I1001 15:38:22.156316 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-qz9pw" Oct 01 15:38:22 crc kubenswrapper[4869]: I1001 15:38:22.309435 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b943448c-de46-41bc-a516-1364799a4eba-inventory\") pod \"b943448c-de46-41bc-a516-1364799a4eba\" (UID: \"b943448c-de46-41bc-a516-1364799a4eba\") " Oct 01 15:38:22 crc kubenswrapper[4869]: I1001 15:38:22.309471 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/b943448c-de46-41bc-a516-1364799a4eba-ceph\") pod \"b943448c-de46-41bc-a516-1364799a4eba\" (UID: \"b943448c-de46-41bc-a516-1364799a4eba\") " Oct 01 15:38:22 crc kubenswrapper[4869]: I1001 15:38:22.309607 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b943448c-de46-41bc-a516-1364799a4eba-ssh-key\") pod \"b943448c-de46-41bc-a516-1364799a4eba\" (UID: \"b943448c-de46-41bc-a516-1364799a4eba\") " Oct 01 15:38:22 crc kubenswrapper[4869]: I1001 15:38:22.309661 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bbxpf\" (UniqueName: \"kubernetes.io/projected/b943448c-de46-41bc-a516-1364799a4eba-kube-api-access-bbxpf\") pod \"b943448c-de46-41bc-a516-1364799a4eba\" (UID: \"b943448c-de46-41bc-a516-1364799a4eba\") " Oct 01 15:38:22 crc kubenswrapper[4869]: I1001 15:38:22.309770 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b943448c-de46-41bc-a516-1364799a4eba-repo-setup-combined-ca-bundle\") pod \"b943448c-de46-41bc-a516-1364799a4eba\" (UID: \"b943448c-de46-41bc-a516-1364799a4eba\") " Oct 01 15:38:22 crc kubenswrapper[4869]: I1001 15:38:22.315811 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b943448c-de46-41bc-a516-1364799a4eba-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "b943448c-de46-41bc-a516-1364799a4eba" (UID: "b943448c-de46-41bc-a516-1364799a4eba"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:38:22 crc kubenswrapper[4869]: I1001 15:38:22.316314 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b943448c-de46-41bc-a516-1364799a4eba-ceph" (OuterVolumeSpecName: "ceph") pod "b943448c-de46-41bc-a516-1364799a4eba" (UID: "b943448c-de46-41bc-a516-1364799a4eba"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:38:22 crc kubenswrapper[4869]: I1001 15:38:22.316781 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b943448c-de46-41bc-a516-1364799a4eba-kube-api-access-bbxpf" (OuterVolumeSpecName: "kube-api-access-bbxpf") pod "b943448c-de46-41bc-a516-1364799a4eba" (UID: "b943448c-de46-41bc-a516-1364799a4eba"). InnerVolumeSpecName "kube-api-access-bbxpf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:38:22 crc kubenswrapper[4869]: I1001 15:38:22.335784 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b943448c-de46-41bc-a516-1364799a4eba-inventory" (OuterVolumeSpecName: "inventory") pod "b943448c-de46-41bc-a516-1364799a4eba" (UID: "b943448c-de46-41bc-a516-1364799a4eba"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:38:22 crc kubenswrapper[4869]: I1001 15:38:22.355945 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b943448c-de46-41bc-a516-1364799a4eba-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "b943448c-de46-41bc-a516-1364799a4eba" (UID: "b943448c-de46-41bc-a516-1364799a4eba"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:38:22 crc kubenswrapper[4869]: I1001 15:38:22.411931 4869 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b943448c-de46-41bc-a516-1364799a4eba-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 01 15:38:22 crc kubenswrapper[4869]: I1001 15:38:22.411970 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bbxpf\" (UniqueName: \"kubernetes.io/projected/b943448c-de46-41bc-a516-1364799a4eba-kube-api-access-bbxpf\") on node \"crc\" DevicePath \"\"" Oct 01 15:38:22 crc kubenswrapper[4869]: I1001 15:38:22.411986 4869 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b943448c-de46-41bc-a516-1364799a4eba-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 15:38:22 crc kubenswrapper[4869]: I1001 15:38:22.411999 4869 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b943448c-de46-41bc-a516-1364799a4eba-inventory\") on node \"crc\" DevicePath \"\"" Oct 01 15:38:22 crc kubenswrapper[4869]: I1001 15:38:22.412010 4869 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/b943448c-de46-41bc-a516-1364799a4eba-ceph\") on node \"crc\" DevicePath \"\"" Oct 01 15:38:22 crc kubenswrapper[4869]: I1001 15:38:22.708841 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-qz9pw" event={"ID":"b943448c-de46-41bc-a516-1364799a4eba","Type":"ContainerDied","Data":"5587a025a6b2daa589d895004c5cd5cbf214d6a0d17a27e8d3b39f9c4fe3da3b"} Oct 01 15:38:22 crc kubenswrapper[4869]: I1001 15:38:22.708881 4869 pod_container_deletor.go:80] "Container not found in pod's 
containers" containerID="5587a025a6b2daa589d895004c5cd5cbf214d6a0d17a27e8d3b39f9c4fe3da3b" Oct 01 15:38:22 crc kubenswrapper[4869]: I1001 15:38:22.708934 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-qz9pw" Oct 01 15:38:22 crc kubenswrapper[4869]: I1001 15:38:22.784010 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-4fzsw"] Oct 01 15:38:22 crc kubenswrapper[4869]: E1001 15:38:22.784715 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d6f8ac01-0c9a-44d0-91e3-363811338874" containerName="extract-utilities" Oct 01 15:38:22 crc kubenswrapper[4869]: I1001 15:38:22.784811 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="d6f8ac01-0c9a-44d0-91e3-363811338874" containerName="extract-utilities" Oct 01 15:38:22 crc kubenswrapper[4869]: E1001 15:38:22.788494 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b943448c-de46-41bc-a516-1364799a4eba" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Oct 01 15:38:22 crc kubenswrapper[4869]: I1001 15:38:22.788571 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="b943448c-de46-41bc-a516-1364799a4eba" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Oct 01 15:38:22 crc kubenswrapper[4869]: E1001 15:38:22.788654 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d6f8ac01-0c9a-44d0-91e3-363811338874" containerName="extract-content" Oct 01 15:38:22 crc kubenswrapper[4869]: I1001 15:38:22.788704 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="d6f8ac01-0c9a-44d0-91e3-363811338874" containerName="extract-content" Oct 01 15:38:22 crc kubenswrapper[4869]: E1001 15:38:22.788776 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d6f8ac01-0c9a-44d0-91e3-363811338874" containerName="registry-server" Oct 01 15:38:22 crc kubenswrapper[4869]: I1001 15:38:22.788829 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="d6f8ac01-0c9a-44d0-91e3-363811338874" containerName="registry-server" Oct 01 15:38:22 crc kubenswrapper[4869]: I1001 15:38:22.789108 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="b943448c-de46-41bc-a516-1364799a4eba" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Oct 01 15:38:22 crc kubenswrapper[4869]: I1001 15:38:22.789184 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="d6f8ac01-0c9a-44d0-91e3-363811338874" containerName="registry-server" Oct 01 15:38:22 crc kubenswrapper[4869]: I1001 15:38:22.789883 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-4fzsw" Oct 01 15:38:22 crc kubenswrapper[4869]: I1001 15:38:22.793039 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 01 15:38:22 crc kubenswrapper[4869]: I1001 15:38:22.793416 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Oct 01 15:38:22 crc kubenswrapper[4869]: I1001 15:38:22.793651 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 01 15:38:22 crc kubenswrapper[4869]: I1001 15:38:22.793710 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 01 15:38:22 crc kubenswrapper[4869]: I1001 15:38:22.793551 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-cjg8g" Oct 01 15:38:22 crc kubenswrapper[4869]: I1001 15:38:22.797291 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-4fzsw"] Oct 01 15:38:22 crc kubenswrapper[4869]: I1001 15:38:22.920896 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/498b6c97-b2b2-4a70-9886-86d2fad1852f-ceph\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-4fzsw\" (UID: \"498b6c97-b2b2-4a70-9886-86d2fad1852f\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-4fzsw" Oct 01 15:38:22 crc kubenswrapper[4869]: I1001 15:38:22.921110 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/498b6c97-b2b2-4a70-9886-86d2fad1852f-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-4fzsw\" (UID: \"498b6c97-b2b2-4a70-9886-86d2fad1852f\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-4fzsw" Oct 01 15:38:22 crc kubenswrapper[4869]: I1001 15:38:22.921248 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rxx9g\" (UniqueName: \"kubernetes.io/projected/498b6c97-b2b2-4a70-9886-86d2fad1852f-kube-api-access-rxx9g\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-4fzsw\" (UID: \"498b6c97-b2b2-4a70-9886-86d2fad1852f\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-4fzsw" Oct 01 15:38:22 crc kubenswrapper[4869]: I1001 15:38:22.921297 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/498b6c97-b2b2-4a70-9886-86d2fad1852f-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-4fzsw\" (UID: \"498b6c97-b2b2-4a70-9886-86d2fad1852f\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-4fzsw" Oct 01 15:38:22 crc kubenswrapper[4869]: I1001 15:38:22.921393 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/498b6c97-b2b2-4a70-9886-86d2fad1852f-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-4fzsw\" (UID: \"498b6c97-b2b2-4a70-9886-86d2fad1852f\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-4fzsw" Oct 01 15:38:23 crc kubenswrapper[4869]: I1001 15:38:23.023514 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"ceph\" (UniqueName: \"kubernetes.io/secret/498b6c97-b2b2-4a70-9886-86d2fad1852f-ceph\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-4fzsw\" (UID: \"498b6c97-b2b2-4a70-9886-86d2fad1852f\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-4fzsw" Oct 01 15:38:23 crc kubenswrapper[4869]: I1001 15:38:23.023842 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/498b6c97-b2b2-4a70-9886-86d2fad1852f-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-4fzsw\" (UID: \"498b6c97-b2b2-4a70-9886-86d2fad1852f\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-4fzsw" Oct 01 15:38:23 crc kubenswrapper[4869]: I1001 15:38:23.023955 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/498b6c97-b2b2-4a70-9886-86d2fad1852f-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-4fzsw\" (UID: \"498b6c97-b2b2-4a70-9886-86d2fad1852f\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-4fzsw" Oct 01 15:38:23 crc kubenswrapper[4869]: I1001 15:38:23.024051 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rxx9g\" (UniqueName: \"kubernetes.io/projected/498b6c97-b2b2-4a70-9886-86d2fad1852f-kube-api-access-rxx9g\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-4fzsw\" (UID: \"498b6c97-b2b2-4a70-9886-86d2fad1852f\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-4fzsw" Oct 01 15:38:23 crc kubenswrapper[4869]: I1001 15:38:23.024172 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/498b6c97-b2b2-4a70-9886-86d2fad1852f-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-4fzsw\" (UID: \"498b6c97-b2b2-4a70-9886-86d2fad1852f\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-4fzsw" Oct 01 15:38:23 crc kubenswrapper[4869]: I1001 15:38:23.027667 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/498b6c97-b2b2-4a70-9886-86d2fad1852f-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-4fzsw\" (UID: \"498b6c97-b2b2-4a70-9886-86d2fad1852f\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-4fzsw" Oct 01 15:38:23 crc kubenswrapper[4869]: I1001 15:38:23.027695 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/498b6c97-b2b2-4a70-9886-86d2fad1852f-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-4fzsw\" (UID: \"498b6c97-b2b2-4a70-9886-86d2fad1852f\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-4fzsw" Oct 01 15:38:23 crc kubenswrapper[4869]: I1001 15:38:23.028816 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/498b6c97-b2b2-4a70-9886-86d2fad1852f-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-4fzsw\" (UID: \"498b6c97-b2b2-4a70-9886-86d2fad1852f\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-4fzsw" Oct 01 15:38:23 crc kubenswrapper[4869]: I1001 15:38:23.029020 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/498b6c97-b2b2-4a70-9886-86d2fad1852f-ceph\") pod 
\"bootstrap-edpm-deployment-openstack-edpm-ipam-4fzsw\" (UID: \"498b6c97-b2b2-4a70-9886-86d2fad1852f\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-4fzsw" Oct 01 15:38:23 crc kubenswrapper[4869]: I1001 15:38:23.050208 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rxx9g\" (UniqueName: \"kubernetes.io/projected/498b6c97-b2b2-4a70-9886-86d2fad1852f-kube-api-access-rxx9g\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-4fzsw\" (UID: \"498b6c97-b2b2-4a70-9886-86d2fad1852f\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-4fzsw" Oct 01 15:38:23 crc kubenswrapper[4869]: I1001 15:38:23.051436 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-mhntb"] Oct 01 15:38:23 crc kubenswrapper[4869]: I1001 15:38:23.051672 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-mhntb" podUID="4c293cc8-e2fd-494e-beae-c1c7a97d8504" containerName="registry-server" containerID="cri-o://d8837138e7dc3a0f274dfb5eb6d23a90c4ace6c91b1143ffdb2f5d591d70a447" gracePeriod=2 Oct 01 15:38:23 crc kubenswrapper[4869]: I1001 15:38:23.124572 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-4fzsw" Oct 01 15:38:23 crc kubenswrapper[4869]: E1001 15:38:23.303901 4869 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4c293cc8_e2fd_494e_beae_c1c7a97d8504.slice/crio-conmon-d8837138e7dc3a0f274dfb5eb6d23a90c4ace6c91b1143ffdb2f5d591d70a447.scope\": RecentStats: unable to find data in memory cache]" Oct 01 15:38:23 crc kubenswrapper[4869]: I1001 15:38:23.530073 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mhntb" Oct 01 15:38:23 crc kubenswrapper[4869]: I1001 15:38:23.629955 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-4fzsw"] Oct 01 15:38:23 crc kubenswrapper[4869]: W1001 15:38:23.631948 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod498b6c97_b2b2_4a70_9886_86d2fad1852f.slice/crio-eeca8728ed6b2d6a1739522f5837489a396b41158fcf78a3579763b76610e6de WatchSource:0}: Error finding container eeca8728ed6b2d6a1739522f5837489a396b41158fcf78a3579763b76610e6de: Status 404 returned error can't find the container with id eeca8728ed6b2d6a1739522f5837489a396b41158fcf78a3579763b76610e6de Oct 01 15:38:23 crc kubenswrapper[4869]: I1001 15:38:23.638145 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wbwhv\" (UniqueName: \"kubernetes.io/projected/4c293cc8-e2fd-494e-beae-c1c7a97d8504-kube-api-access-wbwhv\") pod \"4c293cc8-e2fd-494e-beae-c1c7a97d8504\" (UID: \"4c293cc8-e2fd-494e-beae-c1c7a97d8504\") " Oct 01 15:38:23 crc kubenswrapper[4869]: I1001 15:38:23.638236 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4c293cc8-e2fd-494e-beae-c1c7a97d8504-utilities\") pod \"4c293cc8-e2fd-494e-beae-c1c7a97d8504\" (UID: \"4c293cc8-e2fd-494e-beae-c1c7a97d8504\") " Oct 01 15:38:23 crc kubenswrapper[4869]: I1001 15:38:23.638297 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4c293cc8-e2fd-494e-beae-c1c7a97d8504-catalog-content\") pod \"4c293cc8-e2fd-494e-beae-c1c7a97d8504\" (UID: \"4c293cc8-e2fd-494e-beae-c1c7a97d8504\") " Oct 01 15:38:23 crc kubenswrapper[4869]: I1001 15:38:23.639233 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4c293cc8-e2fd-494e-beae-c1c7a97d8504-utilities" (OuterVolumeSpecName: "utilities") pod "4c293cc8-e2fd-494e-beae-c1c7a97d8504" (UID: "4c293cc8-e2fd-494e-beae-c1c7a97d8504"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 15:38:23 crc kubenswrapper[4869]: I1001 15:38:23.639990 4869 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4c293cc8-e2fd-494e-beae-c1c7a97d8504-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 15:38:23 crc kubenswrapper[4869]: I1001 15:38:23.643822 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4c293cc8-e2fd-494e-beae-c1c7a97d8504-kube-api-access-wbwhv" (OuterVolumeSpecName: "kube-api-access-wbwhv") pod "4c293cc8-e2fd-494e-beae-c1c7a97d8504" (UID: "4c293cc8-e2fd-494e-beae-c1c7a97d8504"). InnerVolumeSpecName "kube-api-access-wbwhv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:38:23 crc kubenswrapper[4869]: I1001 15:38:23.652681 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4c293cc8-e2fd-494e-beae-c1c7a97d8504-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4c293cc8-e2fd-494e-beae-c1c7a97d8504" (UID: "4c293cc8-e2fd-494e-beae-c1c7a97d8504"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 15:38:23 crc kubenswrapper[4869]: I1001 15:38:23.723629 4869 generic.go:334] "Generic (PLEG): container finished" podID="4c293cc8-e2fd-494e-beae-c1c7a97d8504" containerID="d8837138e7dc3a0f274dfb5eb6d23a90c4ace6c91b1143ffdb2f5d591d70a447" exitCode=0 Oct 01 15:38:23 crc kubenswrapper[4869]: I1001 15:38:23.723856 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mhntb" Oct 01 15:38:23 crc kubenswrapper[4869]: I1001 15:38:23.723915 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mhntb" event={"ID":"4c293cc8-e2fd-494e-beae-c1c7a97d8504","Type":"ContainerDied","Data":"d8837138e7dc3a0f274dfb5eb6d23a90c4ace6c91b1143ffdb2f5d591d70a447"} Oct 01 15:38:23 crc kubenswrapper[4869]: I1001 15:38:23.723970 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mhntb" event={"ID":"4c293cc8-e2fd-494e-beae-c1c7a97d8504","Type":"ContainerDied","Data":"0deed5194011916ebaae63af28a4924204b9bf29e2451313ca699c08702230f0"} Oct 01 15:38:23 crc kubenswrapper[4869]: I1001 15:38:23.723992 4869 scope.go:117] "RemoveContainer" containerID="d8837138e7dc3a0f274dfb5eb6d23a90c4ace6c91b1143ffdb2f5d591d70a447" Oct 01 15:38:23 crc kubenswrapper[4869]: I1001 15:38:23.727352 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-4fzsw" event={"ID":"498b6c97-b2b2-4a70-9886-86d2fad1852f","Type":"ContainerStarted","Data":"eeca8728ed6b2d6a1739522f5837489a396b41158fcf78a3579763b76610e6de"} Oct 01 15:38:23 crc kubenswrapper[4869]: I1001 15:38:23.742607 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wbwhv\" (UniqueName: \"kubernetes.io/projected/4c293cc8-e2fd-494e-beae-c1c7a97d8504-kube-api-access-wbwhv\") on node \"crc\" DevicePath \"\"" Oct 01 15:38:23 crc kubenswrapper[4869]: I1001 15:38:23.742667 4869 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4c293cc8-e2fd-494e-beae-c1c7a97d8504-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 15:38:23 crc kubenswrapper[4869]: I1001 15:38:23.743187 4869 scope.go:117] "RemoveContainer" containerID="b6bd2a71cba44167b5c55c048dfb2ab250c541ea5aababdf2560acc4f10e0812" Oct 01 15:38:23 crc kubenswrapper[4869]: I1001 15:38:23.764985 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-mhntb"] Oct 01 15:38:23 crc kubenswrapper[4869]: I1001 15:38:23.766823 4869 scope.go:117] "RemoveContainer" containerID="57646a12cc01e31973685a2e1a1eb897b08c941c98554c080b34f0e8844004d1" Oct 01 15:38:23 crc kubenswrapper[4869]: I1001 15:38:23.772106 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-mhntb"] Oct 01 15:38:23 crc kubenswrapper[4869]: I1001 15:38:23.783349 4869 scope.go:117] "RemoveContainer" containerID="d8837138e7dc3a0f274dfb5eb6d23a90c4ace6c91b1143ffdb2f5d591d70a447" Oct 01 15:38:23 crc kubenswrapper[4869]: E1001 15:38:23.783731 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d8837138e7dc3a0f274dfb5eb6d23a90c4ace6c91b1143ffdb2f5d591d70a447\": container with ID starting with d8837138e7dc3a0f274dfb5eb6d23a90c4ace6c91b1143ffdb2f5d591d70a447 not found: ID does not exist" 
containerID="d8837138e7dc3a0f274dfb5eb6d23a90c4ace6c91b1143ffdb2f5d591d70a447" Oct 01 15:38:23 crc kubenswrapper[4869]: I1001 15:38:23.783764 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d8837138e7dc3a0f274dfb5eb6d23a90c4ace6c91b1143ffdb2f5d591d70a447"} err="failed to get container status \"d8837138e7dc3a0f274dfb5eb6d23a90c4ace6c91b1143ffdb2f5d591d70a447\": rpc error: code = NotFound desc = could not find container \"d8837138e7dc3a0f274dfb5eb6d23a90c4ace6c91b1143ffdb2f5d591d70a447\": container with ID starting with d8837138e7dc3a0f274dfb5eb6d23a90c4ace6c91b1143ffdb2f5d591d70a447 not found: ID does not exist" Oct 01 15:38:23 crc kubenswrapper[4869]: I1001 15:38:23.783787 4869 scope.go:117] "RemoveContainer" containerID="b6bd2a71cba44167b5c55c048dfb2ab250c541ea5aababdf2560acc4f10e0812" Oct 01 15:38:23 crc kubenswrapper[4869]: E1001 15:38:23.784126 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b6bd2a71cba44167b5c55c048dfb2ab250c541ea5aababdf2560acc4f10e0812\": container with ID starting with b6bd2a71cba44167b5c55c048dfb2ab250c541ea5aababdf2560acc4f10e0812 not found: ID does not exist" containerID="b6bd2a71cba44167b5c55c048dfb2ab250c541ea5aababdf2560acc4f10e0812" Oct 01 15:38:23 crc kubenswrapper[4869]: I1001 15:38:23.784198 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b6bd2a71cba44167b5c55c048dfb2ab250c541ea5aababdf2560acc4f10e0812"} err="failed to get container status \"b6bd2a71cba44167b5c55c048dfb2ab250c541ea5aababdf2560acc4f10e0812\": rpc error: code = NotFound desc = could not find container \"b6bd2a71cba44167b5c55c048dfb2ab250c541ea5aababdf2560acc4f10e0812\": container with ID starting with b6bd2a71cba44167b5c55c048dfb2ab250c541ea5aababdf2560acc4f10e0812 not found: ID does not exist" Oct 01 15:38:23 crc kubenswrapper[4869]: I1001 15:38:23.784231 4869 scope.go:117] "RemoveContainer" containerID="57646a12cc01e31973685a2e1a1eb897b08c941c98554c080b34f0e8844004d1" Oct 01 15:38:23 crc kubenswrapper[4869]: E1001 15:38:23.784602 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"57646a12cc01e31973685a2e1a1eb897b08c941c98554c080b34f0e8844004d1\": container with ID starting with 57646a12cc01e31973685a2e1a1eb897b08c941c98554c080b34f0e8844004d1 not found: ID does not exist" containerID="57646a12cc01e31973685a2e1a1eb897b08c941c98554c080b34f0e8844004d1" Oct 01 15:38:23 crc kubenswrapper[4869]: I1001 15:38:23.784634 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"57646a12cc01e31973685a2e1a1eb897b08c941c98554c080b34f0e8844004d1"} err="failed to get container status \"57646a12cc01e31973685a2e1a1eb897b08c941c98554c080b34f0e8844004d1\": rpc error: code = NotFound desc = could not find container \"57646a12cc01e31973685a2e1a1eb897b08c941c98554c080b34f0e8844004d1\": container with ID starting with 57646a12cc01e31973685a2e1a1eb897b08c941c98554c080b34f0e8844004d1 not found: ID does not exist" Oct 01 15:38:25 crc kubenswrapper[4869]: I1001 15:38:25.594300 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4c293cc8-e2fd-494e-beae-c1c7a97d8504" path="/var/lib/kubelet/pods/4c293cc8-e2fd-494e-beae-c1c7a97d8504/volumes" Oct 01 15:38:25 crc kubenswrapper[4869]: I1001 15:38:25.750404 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-4fzsw" event={"ID":"498b6c97-b2b2-4a70-9886-86d2fad1852f","Type":"ContainerStarted","Data":"24da4b33a1c2ca7b408b34d654f72307d6b37e1c25337c45023b8443a2476f3c"} Oct 01 15:38:25 crc kubenswrapper[4869]: I1001 15:38:25.771924 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-4fzsw" podStartSLOduration=2.900733949 podStartE2EDuration="3.771903049s" podCreationTimestamp="2025-10-01 15:38:22 +0000 UTC" firstStartedPulling="2025-10-01 15:38:23.634336538 +0000 UTC m=+2012.781179654" lastFinishedPulling="2025-10-01 15:38:24.505505638 +0000 UTC m=+2013.652348754" observedRunningTime="2025-10-01 15:38:25.771003816 +0000 UTC m=+2014.917846962" watchObservedRunningTime="2025-10-01 15:38:25.771903049 +0000 UTC m=+2014.918746175" Oct 01 15:39:13 crc kubenswrapper[4869]: I1001 15:39:13.354477 4869 patch_prober.go:28] interesting pod/machine-config-daemon-c86m8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 15:39:13 crc kubenswrapper[4869]: I1001 15:39:13.354974 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 15:39:14 crc kubenswrapper[4869]: I1001 15:39:14.778781 4869 scope.go:117] "RemoveContainer" containerID="fba129297637ffe02d64b293921551dad383d78ff9d2bf2f736f00abf748bcf4" Oct 01 15:39:43 crc kubenswrapper[4869]: I1001 15:39:43.354003 4869 patch_prober.go:28] interesting pod/machine-config-daemon-c86m8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 15:39:43 crc kubenswrapper[4869]: I1001 15:39:43.354692 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 15:39:58 crc kubenswrapper[4869]: I1001 15:39:58.568336 4869 generic.go:334] "Generic (PLEG): container finished" podID="498b6c97-b2b2-4a70-9886-86d2fad1852f" containerID="24da4b33a1c2ca7b408b34d654f72307d6b37e1c25337c45023b8443a2476f3c" exitCode=0 Oct 01 15:39:58 crc kubenswrapper[4869]: I1001 15:39:58.569654 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-4fzsw" event={"ID":"498b6c97-b2b2-4a70-9886-86d2fad1852f","Type":"ContainerDied","Data":"24da4b33a1c2ca7b408b34d654f72307d6b37e1c25337c45023b8443a2476f3c"} Oct 01 15:40:00 crc kubenswrapper[4869]: I1001 15:40:00.082409 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-4fzsw" Oct 01 15:40:00 crc kubenswrapper[4869]: I1001 15:40:00.222101 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/498b6c97-b2b2-4a70-9886-86d2fad1852f-bootstrap-combined-ca-bundle\") pod \"498b6c97-b2b2-4a70-9886-86d2fad1852f\" (UID: \"498b6c97-b2b2-4a70-9886-86d2fad1852f\") " Oct 01 15:40:00 crc kubenswrapper[4869]: I1001 15:40:00.222645 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/498b6c97-b2b2-4a70-9886-86d2fad1852f-ssh-key\") pod \"498b6c97-b2b2-4a70-9886-86d2fad1852f\" (UID: \"498b6c97-b2b2-4a70-9886-86d2fad1852f\") " Oct 01 15:40:00 crc kubenswrapper[4869]: I1001 15:40:00.222728 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/498b6c97-b2b2-4a70-9886-86d2fad1852f-inventory\") pod \"498b6c97-b2b2-4a70-9886-86d2fad1852f\" (UID: \"498b6c97-b2b2-4a70-9886-86d2fad1852f\") " Oct 01 15:40:00 crc kubenswrapper[4869]: I1001 15:40:00.222778 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/498b6c97-b2b2-4a70-9886-86d2fad1852f-ceph\") pod \"498b6c97-b2b2-4a70-9886-86d2fad1852f\" (UID: \"498b6c97-b2b2-4a70-9886-86d2fad1852f\") " Oct 01 15:40:00 crc kubenswrapper[4869]: I1001 15:40:00.222990 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rxx9g\" (UniqueName: \"kubernetes.io/projected/498b6c97-b2b2-4a70-9886-86d2fad1852f-kube-api-access-rxx9g\") pod \"498b6c97-b2b2-4a70-9886-86d2fad1852f\" (UID: \"498b6c97-b2b2-4a70-9886-86d2fad1852f\") " Oct 01 15:40:00 crc kubenswrapper[4869]: I1001 15:40:00.228890 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/498b6c97-b2b2-4a70-9886-86d2fad1852f-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "498b6c97-b2b2-4a70-9886-86d2fad1852f" (UID: "498b6c97-b2b2-4a70-9886-86d2fad1852f"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:40:00 crc kubenswrapper[4869]: I1001 15:40:00.229200 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/498b6c97-b2b2-4a70-9886-86d2fad1852f-kube-api-access-rxx9g" (OuterVolumeSpecName: "kube-api-access-rxx9g") pod "498b6c97-b2b2-4a70-9886-86d2fad1852f" (UID: "498b6c97-b2b2-4a70-9886-86d2fad1852f"). InnerVolumeSpecName "kube-api-access-rxx9g". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:40:00 crc kubenswrapper[4869]: I1001 15:40:00.231934 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/498b6c97-b2b2-4a70-9886-86d2fad1852f-ceph" (OuterVolumeSpecName: "ceph") pod "498b6c97-b2b2-4a70-9886-86d2fad1852f" (UID: "498b6c97-b2b2-4a70-9886-86d2fad1852f"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:40:00 crc kubenswrapper[4869]: I1001 15:40:00.259846 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/498b6c97-b2b2-4a70-9886-86d2fad1852f-inventory" (OuterVolumeSpecName: "inventory") pod "498b6c97-b2b2-4a70-9886-86d2fad1852f" (UID: "498b6c97-b2b2-4a70-9886-86d2fad1852f"). 
InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:40:00 crc kubenswrapper[4869]: I1001 15:40:00.286678 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/498b6c97-b2b2-4a70-9886-86d2fad1852f-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "498b6c97-b2b2-4a70-9886-86d2fad1852f" (UID: "498b6c97-b2b2-4a70-9886-86d2fad1852f"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:40:00 crc kubenswrapper[4869]: I1001 15:40:00.327211 4869 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/498b6c97-b2b2-4a70-9886-86d2fad1852f-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 01 15:40:00 crc kubenswrapper[4869]: I1001 15:40:00.327290 4869 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/498b6c97-b2b2-4a70-9886-86d2fad1852f-inventory\") on node \"crc\" DevicePath \"\"" Oct 01 15:40:00 crc kubenswrapper[4869]: I1001 15:40:00.327311 4869 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/498b6c97-b2b2-4a70-9886-86d2fad1852f-ceph\") on node \"crc\" DevicePath \"\"" Oct 01 15:40:00 crc kubenswrapper[4869]: I1001 15:40:00.327332 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rxx9g\" (UniqueName: \"kubernetes.io/projected/498b6c97-b2b2-4a70-9886-86d2fad1852f-kube-api-access-rxx9g\") on node \"crc\" DevicePath \"\"" Oct 01 15:40:00 crc kubenswrapper[4869]: I1001 15:40:00.327355 4869 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/498b6c97-b2b2-4a70-9886-86d2fad1852f-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 15:40:00 crc kubenswrapper[4869]: I1001 15:40:00.595141 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-4fzsw" event={"ID":"498b6c97-b2b2-4a70-9886-86d2fad1852f","Type":"ContainerDied","Data":"eeca8728ed6b2d6a1739522f5837489a396b41158fcf78a3579763b76610e6de"} Oct 01 15:40:00 crc kubenswrapper[4869]: I1001 15:40:00.595192 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="eeca8728ed6b2d6a1739522f5837489a396b41158fcf78a3579763b76610e6de" Oct 01 15:40:00 crc kubenswrapper[4869]: I1001 15:40:00.595198 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-4fzsw" Oct 01 15:40:00 crc kubenswrapper[4869]: I1001 15:40:00.703532 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-h46vg"] Oct 01 15:40:00 crc kubenswrapper[4869]: E1001 15:40:00.703992 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4c293cc8-e2fd-494e-beae-c1c7a97d8504" containerName="extract-utilities" Oct 01 15:40:00 crc kubenswrapper[4869]: I1001 15:40:00.704013 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="4c293cc8-e2fd-494e-beae-c1c7a97d8504" containerName="extract-utilities" Oct 01 15:40:00 crc kubenswrapper[4869]: E1001 15:40:00.704022 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4c293cc8-e2fd-494e-beae-c1c7a97d8504" containerName="extract-content" Oct 01 15:40:00 crc kubenswrapper[4869]: I1001 15:40:00.704031 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="4c293cc8-e2fd-494e-beae-c1c7a97d8504" containerName="extract-content" Oct 01 15:40:00 crc kubenswrapper[4869]: E1001 15:40:00.704049 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4c293cc8-e2fd-494e-beae-c1c7a97d8504" containerName="registry-server" Oct 01 15:40:00 crc kubenswrapper[4869]: I1001 15:40:00.704057 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="4c293cc8-e2fd-494e-beae-c1c7a97d8504" containerName="registry-server" Oct 01 15:40:00 crc kubenswrapper[4869]: E1001 15:40:00.704080 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="498b6c97-b2b2-4a70-9886-86d2fad1852f" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Oct 01 15:40:00 crc kubenswrapper[4869]: I1001 15:40:00.704088 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="498b6c97-b2b2-4a70-9886-86d2fad1852f" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Oct 01 15:40:00 crc kubenswrapper[4869]: I1001 15:40:00.704327 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="498b6c97-b2b2-4a70-9886-86d2fad1852f" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Oct 01 15:40:00 crc kubenswrapper[4869]: I1001 15:40:00.704369 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="4c293cc8-e2fd-494e-beae-c1c7a97d8504" containerName="registry-server" Oct 01 15:40:00 crc kubenswrapper[4869]: I1001 15:40:00.705106 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-h46vg" Oct 01 15:40:00 crc kubenswrapper[4869]: I1001 15:40:00.707389 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Oct 01 15:40:00 crc kubenswrapper[4869]: I1001 15:40:00.707447 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 01 15:40:00 crc kubenswrapper[4869]: I1001 15:40:00.707615 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 01 15:40:00 crc kubenswrapper[4869]: I1001 15:40:00.707979 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 01 15:40:00 crc kubenswrapper[4869]: I1001 15:40:00.709066 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-cjg8g" Oct 01 15:40:00 crc kubenswrapper[4869]: I1001 15:40:00.725729 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-h46vg"] Oct 01 15:40:00 crc kubenswrapper[4869]: I1001 15:40:00.835241 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/3836336e-8f74-48bb-a28b-ed64d526085b-ceph\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-h46vg\" (UID: \"3836336e-8f74-48bb-a28b-ed64d526085b\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-h46vg" Oct 01 15:40:00 crc kubenswrapper[4869]: I1001 15:40:00.835298 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3836336e-8f74-48bb-a28b-ed64d526085b-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-h46vg\" (UID: \"3836336e-8f74-48bb-a28b-ed64d526085b\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-h46vg" Oct 01 15:40:00 crc kubenswrapper[4869]: I1001 15:40:00.835370 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3836336e-8f74-48bb-a28b-ed64d526085b-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-h46vg\" (UID: \"3836336e-8f74-48bb-a28b-ed64d526085b\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-h46vg" Oct 01 15:40:00 crc kubenswrapper[4869]: I1001 15:40:00.835435 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tgnn2\" (UniqueName: \"kubernetes.io/projected/3836336e-8f74-48bb-a28b-ed64d526085b-kube-api-access-tgnn2\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-h46vg\" (UID: \"3836336e-8f74-48bb-a28b-ed64d526085b\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-h46vg" Oct 01 15:40:00 crc kubenswrapper[4869]: I1001 15:40:00.937269 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/3836336e-8f74-48bb-a28b-ed64d526085b-ceph\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-h46vg\" (UID: \"3836336e-8f74-48bb-a28b-ed64d526085b\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-h46vg" Oct 01 15:40:00 crc kubenswrapper[4869]: I1001 15:40:00.937325 4869 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3836336e-8f74-48bb-a28b-ed64d526085b-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-h46vg\" (UID: \"3836336e-8f74-48bb-a28b-ed64d526085b\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-h46vg" Oct 01 15:40:00 crc kubenswrapper[4869]: I1001 15:40:00.937396 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3836336e-8f74-48bb-a28b-ed64d526085b-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-h46vg\" (UID: \"3836336e-8f74-48bb-a28b-ed64d526085b\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-h46vg" Oct 01 15:40:00 crc kubenswrapper[4869]: I1001 15:40:00.937437 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tgnn2\" (UniqueName: \"kubernetes.io/projected/3836336e-8f74-48bb-a28b-ed64d526085b-kube-api-access-tgnn2\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-h46vg\" (UID: \"3836336e-8f74-48bb-a28b-ed64d526085b\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-h46vg" Oct 01 15:40:00 crc kubenswrapper[4869]: I1001 15:40:00.942176 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3836336e-8f74-48bb-a28b-ed64d526085b-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-h46vg\" (UID: \"3836336e-8f74-48bb-a28b-ed64d526085b\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-h46vg" Oct 01 15:40:00 crc kubenswrapper[4869]: I1001 15:40:00.942319 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3836336e-8f74-48bb-a28b-ed64d526085b-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-h46vg\" (UID: \"3836336e-8f74-48bb-a28b-ed64d526085b\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-h46vg" Oct 01 15:40:00 crc kubenswrapper[4869]: I1001 15:40:00.942609 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/3836336e-8f74-48bb-a28b-ed64d526085b-ceph\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-h46vg\" (UID: \"3836336e-8f74-48bb-a28b-ed64d526085b\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-h46vg" Oct 01 15:40:00 crc kubenswrapper[4869]: I1001 15:40:00.956649 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tgnn2\" (UniqueName: \"kubernetes.io/projected/3836336e-8f74-48bb-a28b-ed64d526085b-kube-api-access-tgnn2\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-h46vg\" (UID: \"3836336e-8f74-48bb-a28b-ed64d526085b\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-h46vg" Oct 01 15:40:01 crc kubenswrapper[4869]: I1001 15:40:01.020684 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-h46vg" Oct 01 15:40:01 crc kubenswrapper[4869]: I1001 15:40:01.542316 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-h46vg"] Oct 01 15:40:01 crc kubenswrapper[4869]: I1001 15:40:01.608041 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-h46vg" event={"ID":"3836336e-8f74-48bb-a28b-ed64d526085b","Type":"ContainerStarted","Data":"f56110524c97994519b45bb5b80fc11611f91864ebfc1fa72fc299c6cb5ca961"} Oct 01 15:40:03 crc kubenswrapper[4869]: I1001 15:40:03.631346 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-h46vg" event={"ID":"3836336e-8f74-48bb-a28b-ed64d526085b","Type":"ContainerStarted","Data":"9b547613f7287d78d0eca5a455e3c40ae035bfc038588c5478285bbfa08e5efd"} Oct 01 15:40:03 crc kubenswrapper[4869]: I1001 15:40:03.658184 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-h46vg" podStartSLOduration=2.253534637 podStartE2EDuration="3.658165653s" podCreationTimestamp="2025-10-01 15:40:00 +0000 UTC" firstStartedPulling="2025-10-01 15:40:01.551767328 +0000 UTC m=+2110.698610454" lastFinishedPulling="2025-10-01 15:40:02.956398354 +0000 UTC m=+2112.103241470" observedRunningTime="2025-10-01 15:40:03.648207763 +0000 UTC m=+2112.795050889" watchObservedRunningTime="2025-10-01 15:40:03.658165653 +0000 UTC m=+2112.805008779" Oct 01 15:40:13 crc kubenswrapper[4869]: I1001 15:40:13.354567 4869 patch_prober.go:28] interesting pod/machine-config-daemon-c86m8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 15:40:13 crc kubenswrapper[4869]: I1001 15:40:13.355070 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 15:40:13 crc kubenswrapper[4869]: I1001 15:40:13.355118 4869 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" Oct 01 15:40:13 crc kubenswrapper[4869]: I1001 15:40:13.355586 4869 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"850fa9657d55ab61ed48a042dba1eb165240f62a294e8f87d32c9a3206247a54"} pod="openshift-machine-config-operator/machine-config-daemon-c86m8" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 01 15:40:13 crc kubenswrapper[4869]: I1001 15:40:13.355646 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" containerID="cri-o://850fa9657d55ab61ed48a042dba1eb165240f62a294e8f87d32c9a3206247a54" gracePeriod=600 Oct 01 15:40:13 crc kubenswrapper[4869]: E1001 15:40:13.491124 4869 pod_workers.go:1301] "Error syncing pod, skipping" 
err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 15:40:13 crc kubenswrapper[4869]: I1001 15:40:13.734498 4869 generic.go:334] "Generic (PLEG): container finished" podID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerID="850fa9657d55ab61ed48a042dba1eb165240f62a294e8f87d32c9a3206247a54" exitCode=0 Oct 01 15:40:13 crc kubenswrapper[4869]: I1001 15:40:13.734606 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" event={"ID":"a4b64f7f-0b03-4f47-965b-9fde048b735c","Type":"ContainerDied","Data":"850fa9657d55ab61ed48a042dba1eb165240f62a294e8f87d32c9a3206247a54"} Oct 01 15:40:13 crc kubenswrapper[4869]: I1001 15:40:13.734850 4869 scope.go:117] "RemoveContainer" containerID="9a99d03d53eede2552e3857831adcef848702c91bd2e41e6690813d14315bcda" Oct 01 15:40:13 crc kubenswrapper[4869]: I1001 15:40:13.735667 4869 scope.go:117] "RemoveContainer" containerID="850fa9657d55ab61ed48a042dba1eb165240f62a294e8f87d32c9a3206247a54" Oct 01 15:40:13 crc kubenswrapper[4869]: E1001 15:40:13.736031 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 15:40:26 crc kubenswrapper[4869]: I1001 15:40:26.581533 4869 scope.go:117] "RemoveContainer" containerID="850fa9657d55ab61ed48a042dba1eb165240f62a294e8f87d32c9a3206247a54" Oct 01 15:40:26 crc kubenswrapper[4869]: E1001 15:40:26.582539 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 15:40:28 crc kubenswrapper[4869]: I1001 15:40:28.911613 4869 generic.go:334] "Generic (PLEG): container finished" podID="3836336e-8f74-48bb-a28b-ed64d526085b" containerID="9b547613f7287d78d0eca5a455e3c40ae035bfc038588c5478285bbfa08e5efd" exitCode=0 Oct 01 15:40:28 crc kubenswrapper[4869]: I1001 15:40:28.911749 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-h46vg" event={"ID":"3836336e-8f74-48bb-a28b-ed64d526085b","Type":"ContainerDied","Data":"9b547613f7287d78d0eca5a455e3c40ae035bfc038588c5478285bbfa08e5efd"} Oct 01 15:40:30 crc kubenswrapper[4869]: I1001 15:40:30.365119 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-h46vg" Oct 01 15:40:30 crc kubenswrapper[4869]: I1001 15:40:30.448642 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/3836336e-8f74-48bb-a28b-ed64d526085b-ceph\") pod \"3836336e-8f74-48bb-a28b-ed64d526085b\" (UID: \"3836336e-8f74-48bb-a28b-ed64d526085b\") " Oct 01 15:40:30 crc kubenswrapper[4869]: I1001 15:40:30.448714 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tgnn2\" (UniqueName: \"kubernetes.io/projected/3836336e-8f74-48bb-a28b-ed64d526085b-kube-api-access-tgnn2\") pod \"3836336e-8f74-48bb-a28b-ed64d526085b\" (UID: \"3836336e-8f74-48bb-a28b-ed64d526085b\") " Oct 01 15:40:30 crc kubenswrapper[4869]: I1001 15:40:30.448776 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3836336e-8f74-48bb-a28b-ed64d526085b-inventory\") pod \"3836336e-8f74-48bb-a28b-ed64d526085b\" (UID: \"3836336e-8f74-48bb-a28b-ed64d526085b\") " Oct 01 15:40:30 crc kubenswrapper[4869]: I1001 15:40:30.448880 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3836336e-8f74-48bb-a28b-ed64d526085b-ssh-key\") pod \"3836336e-8f74-48bb-a28b-ed64d526085b\" (UID: \"3836336e-8f74-48bb-a28b-ed64d526085b\") " Oct 01 15:40:30 crc kubenswrapper[4869]: I1001 15:40:30.454982 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3836336e-8f74-48bb-a28b-ed64d526085b-ceph" (OuterVolumeSpecName: "ceph") pod "3836336e-8f74-48bb-a28b-ed64d526085b" (UID: "3836336e-8f74-48bb-a28b-ed64d526085b"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:40:30 crc kubenswrapper[4869]: I1001 15:40:30.455030 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3836336e-8f74-48bb-a28b-ed64d526085b-kube-api-access-tgnn2" (OuterVolumeSpecName: "kube-api-access-tgnn2") pod "3836336e-8f74-48bb-a28b-ed64d526085b" (UID: "3836336e-8f74-48bb-a28b-ed64d526085b"). InnerVolumeSpecName "kube-api-access-tgnn2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:40:30 crc kubenswrapper[4869]: I1001 15:40:30.476668 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3836336e-8f74-48bb-a28b-ed64d526085b-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "3836336e-8f74-48bb-a28b-ed64d526085b" (UID: "3836336e-8f74-48bb-a28b-ed64d526085b"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:40:30 crc kubenswrapper[4869]: I1001 15:40:30.477499 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3836336e-8f74-48bb-a28b-ed64d526085b-inventory" (OuterVolumeSpecName: "inventory") pod "3836336e-8f74-48bb-a28b-ed64d526085b" (UID: "3836336e-8f74-48bb-a28b-ed64d526085b"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:40:30 crc kubenswrapper[4869]: I1001 15:40:30.550600 4869 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/3836336e-8f74-48bb-a28b-ed64d526085b-ceph\") on node \"crc\" DevicePath \"\"" Oct 01 15:40:30 crc kubenswrapper[4869]: I1001 15:40:30.550636 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tgnn2\" (UniqueName: \"kubernetes.io/projected/3836336e-8f74-48bb-a28b-ed64d526085b-kube-api-access-tgnn2\") on node \"crc\" DevicePath \"\"" Oct 01 15:40:30 crc kubenswrapper[4869]: I1001 15:40:30.550648 4869 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3836336e-8f74-48bb-a28b-ed64d526085b-inventory\") on node \"crc\" DevicePath \"\"" Oct 01 15:40:30 crc kubenswrapper[4869]: I1001 15:40:30.550657 4869 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3836336e-8f74-48bb-a28b-ed64d526085b-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 01 15:40:30 crc kubenswrapper[4869]: I1001 15:40:30.932751 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-h46vg" event={"ID":"3836336e-8f74-48bb-a28b-ed64d526085b","Type":"ContainerDied","Data":"f56110524c97994519b45bb5b80fc11611f91864ebfc1fa72fc299c6cb5ca961"} Oct 01 15:40:30 crc kubenswrapper[4869]: I1001 15:40:30.933213 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f56110524c97994519b45bb5b80fc11611f91864ebfc1fa72fc299c6cb5ca961" Oct 01 15:40:30 crc kubenswrapper[4869]: I1001 15:40:30.932798 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-h46vg" Oct 01 15:40:31 crc kubenswrapper[4869]: I1001 15:40:31.036708 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gthv5"] Oct 01 15:40:31 crc kubenswrapper[4869]: E1001 15:40:31.037117 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3836336e-8f74-48bb-a28b-ed64d526085b" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Oct 01 15:40:31 crc kubenswrapper[4869]: I1001 15:40:31.037139 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="3836336e-8f74-48bb-a28b-ed64d526085b" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Oct 01 15:40:31 crc kubenswrapper[4869]: I1001 15:40:31.041629 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="3836336e-8f74-48bb-a28b-ed64d526085b" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Oct 01 15:40:31 crc kubenswrapper[4869]: I1001 15:40:31.042451 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gthv5" Oct 01 15:40:31 crc kubenswrapper[4869]: I1001 15:40:31.044564 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 01 15:40:31 crc kubenswrapper[4869]: I1001 15:40:31.044685 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 01 15:40:31 crc kubenswrapper[4869]: I1001 15:40:31.044735 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Oct 01 15:40:31 crc kubenswrapper[4869]: I1001 15:40:31.044907 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 01 15:40:31 crc kubenswrapper[4869]: I1001 15:40:31.045363 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-cjg8g" Oct 01 15:40:31 crc kubenswrapper[4869]: I1001 15:40:31.060612 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gthv5"] Oct 01 15:40:31 crc kubenswrapper[4869]: I1001 15:40:31.162413 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/76589bac-382f-430e-83f3-ff32e9634017-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-gthv5\" (UID: \"76589bac-382f-430e-83f3-ff32e9634017\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gthv5" Oct 01 15:40:31 crc kubenswrapper[4869]: I1001 15:40:31.162485 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/76589bac-382f-430e-83f3-ff32e9634017-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-gthv5\" (UID: \"76589bac-382f-430e-83f3-ff32e9634017\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gthv5" Oct 01 15:40:31 crc kubenswrapper[4869]: I1001 15:40:31.162562 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v8sr9\" (UniqueName: \"kubernetes.io/projected/76589bac-382f-430e-83f3-ff32e9634017-kube-api-access-v8sr9\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-gthv5\" (UID: \"76589bac-382f-430e-83f3-ff32e9634017\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gthv5" Oct 01 15:40:31 crc kubenswrapper[4869]: I1001 15:40:31.162632 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/76589bac-382f-430e-83f3-ff32e9634017-ceph\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-gthv5\" (UID: \"76589bac-382f-430e-83f3-ff32e9634017\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gthv5" Oct 01 15:40:31 crc kubenswrapper[4869]: I1001 15:40:31.264432 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/76589bac-382f-430e-83f3-ff32e9634017-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-gthv5\" (UID: \"76589bac-382f-430e-83f3-ff32e9634017\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gthv5" Oct 01 15:40:31 crc kubenswrapper[4869]: I1001 15:40:31.264568 4869 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-v8sr9\" (UniqueName: \"kubernetes.io/projected/76589bac-382f-430e-83f3-ff32e9634017-kube-api-access-v8sr9\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-gthv5\" (UID: \"76589bac-382f-430e-83f3-ff32e9634017\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gthv5" Oct 01 15:40:31 crc kubenswrapper[4869]: I1001 15:40:31.265384 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/76589bac-382f-430e-83f3-ff32e9634017-ceph\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-gthv5\" (UID: \"76589bac-382f-430e-83f3-ff32e9634017\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gthv5" Oct 01 15:40:31 crc kubenswrapper[4869]: I1001 15:40:31.265511 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/76589bac-382f-430e-83f3-ff32e9634017-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-gthv5\" (UID: \"76589bac-382f-430e-83f3-ff32e9634017\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gthv5" Oct 01 15:40:31 crc kubenswrapper[4869]: I1001 15:40:31.269635 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/76589bac-382f-430e-83f3-ff32e9634017-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-gthv5\" (UID: \"76589bac-382f-430e-83f3-ff32e9634017\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gthv5" Oct 01 15:40:31 crc kubenswrapper[4869]: I1001 15:40:31.269818 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/76589bac-382f-430e-83f3-ff32e9634017-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-gthv5\" (UID: \"76589bac-382f-430e-83f3-ff32e9634017\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gthv5" Oct 01 15:40:31 crc kubenswrapper[4869]: I1001 15:40:31.269839 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/76589bac-382f-430e-83f3-ff32e9634017-ceph\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-gthv5\" (UID: \"76589bac-382f-430e-83f3-ff32e9634017\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gthv5" Oct 01 15:40:31 crc kubenswrapper[4869]: I1001 15:40:31.282921 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v8sr9\" (UniqueName: \"kubernetes.io/projected/76589bac-382f-430e-83f3-ff32e9634017-kube-api-access-v8sr9\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-gthv5\" (UID: \"76589bac-382f-430e-83f3-ff32e9634017\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gthv5" Oct 01 15:40:31 crc kubenswrapper[4869]: I1001 15:40:31.361921 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gthv5" Oct 01 15:40:31 crc kubenswrapper[4869]: I1001 15:40:31.703114 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gthv5"] Oct 01 15:40:31 crc kubenswrapper[4869]: I1001 15:40:31.939324 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gthv5" event={"ID":"76589bac-382f-430e-83f3-ff32e9634017","Type":"ContainerStarted","Data":"b0c9c92c66f3900bba00f7b836de83ca81ef74458f9abb9d1c4d84ab81a3f215"} Oct 01 15:40:33 crc kubenswrapper[4869]: I1001 15:40:33.963719 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gthv5" event={"ID":"76589bac-382f-430e-83f3-ff32e9634017","Type":"ContainerStarted","Data":"e9a9d5249fed292edc22e3aa1fbfec074c3ad6307572d0949d463a4ed5c15827"} Oct 01 15:40:33 crc kubenswrapper[4869]: I1001 15:40:33.990953 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gthv5" podStartSLOduration=2.157740326 podStartE2EDuration="2.990930773s" podCreationTimestamp="2025-10-01 15:40:31 +0000 UTC" firstStartedPulling="2025-10-01 15:40:31.705312418 +0000 UTC m=+2140.852155534" lastFinishedPulling="2025-10-01 15:40:32.538502825 +0000 UTC m=+2141.685345981" observedRunningTime="2025-10-01 15:40:33.982776538 +0000 UTC m=+2143.129619664" watchObservedRunningTime="2025-10-01 15:40:33.990930773 +0000 UTC m=+2143.137773899" Oct 01 15:40:38 crc kubenswrapper[4869]: I1001 15:40:38.004873 4869 generic.go:334] "Generic (PLEG): container finished" podID="76589bac-382f-430e-83f3-ff32e9634017" containerID="e9a9d5249fed292edc22e3aa1fbfec074c3ad6307572d0949d463a4ed5c15827" exitCode=0 Oct 01 15:40:38 crc kubenswrapper[4869]: I1001 15:40:38.004980 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gthv5" event={"ID":"76589bac-382f-430e-83f3-ff32e9634017","Type":"ContainerDied","Data":"e9a9d5249fed292edc22e3aa1fbfec074c3ad6307572d0949d463a4ed5c15827"} Oct 01 15:40:39 crc kubenswrapper[4869]: I1001 15:40:39.426593 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gthv5" Oct 01 15:40:39 crc kubenswrapper[4869]: I1001 15:40:39.525827 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/76589bac-382f-430e-83f3-ff32e9634017-ceph\") pod \"76589bac-382f-430e-83f3-ff32e9634017\" (UID: \"76589bac-382f-430e-83f3-ff32e9634017\") " Oct 01 15:40:39 crc kubenswrapper[4869]: I1001 15:40:39.525929 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/76589bac-382f-430e-83f3-ff32e9634017-inventory\") pod \"76589bac-382f-430e-83f3-ff32e9634017\" (UID: \"76589bac-382f-430e-83f3-ff32e9634017\") " Oct 01 15:40:39 crc kubenswrapper[4869]: I1001 15:40:39.526825 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/76589bac-382f-430e-83f3-ff32e9634017-ssh-key\") pod \"76589bac-382f-430e-83f3-ff32e9634017\" (UID: \"76589bac-382f-430e-83f3-ff32e9634017\") " Oct 01 15:40:39 crc kubenswrapper[4869]: I1001 15:40:39.527055 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v8sr9\" (UniqueName: \"kubernetes.io/projected/76589bac-382f-430e-83f3-ff32e9634017-kube-api-access-v8sr9\") pod \"76589bac-382f-430e-83f3-ff32e9634017\" (UID: \"76589bac-382f-430e-83f3-ff32e9634017\") " Oct 01 15:40:39 crc kubenswrapper[4869]: I1001 15:40:39.533333 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/76589bac-382f-430e-83f3-ff32e9634017-kube-api-access-v8sr9" (OuterVolumeSpecName: "kube-api-access-v8sr9") pod "76589bac-382f-430e-83f3-ff32e9634017" (UID: "76589bac-382f-430e-83f3-ff32e9634017"). InnerVolumeSpecName "kube-api-access-v8sr9". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:40:39 crc kubenswrapper[4869]: I1001 15:40:39.533447 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/76589bac-382f-430e-83f3-ff32e9634017-ceph" (OuterVolumeSpecName: "ceph") pod "76589bac-382f-430e-83f3-ff32e9634017" (UID: "76589bac-382f-430e-83f3-ff32e9634017"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:40:39 crc kubenswrapper[4869]: I1001 15:40:39.555107 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/76589bac-382f-430e-83f3-ff32e9634017-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "76589bac-382f-430e-83f3-ff32e9634017" (UID: "76589bac-382f-430e-83f3-ff32e9634017"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:40:39 crc kubenswrapper[4869]: I1001 15:40:39.558237 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/76589bac-382f-430e-83f3-ff32e9634017-inventory" (OuterVolumeSpecName: "inventory") pod "76589bac-382f-430e-83f3-ff32e9634017" (UID: "76589bac-382f-430e-83f3-ff32e9634017"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:40:39 crc kubenswrapper[4869]: I1001 15:40:39.581822 4869 scope.go:117] "RemoveContainer" containerID="850fa9657d55ab61ed48a042dba1eb165240f62a294e8f87d32c9a3206247a54" Oct 01 15:40:39 crc kubenswrapper[4869]: E1001 15:40:39.582307 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 15:40:39 crc kubenswrapper[4869]: I1001 15:40:39.631045 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v8sr9\" (UniqueName: \"kubernetes.io/projected/76589bac-382f-430e-83f3-ff32e9634017-kube-api-access-v8sr9\") on node \"crc\" DevicePath \"\"" Oct 01 15:40:39 crc kubenswrapper[4869]: I1001 15:40:39.631096 4869 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/76589bac-382f-430e-83f3-ff32e9634017-ceph\") on node \"crc\" DevicePath \"\"" Oct 01 15:40:39 crc kubenswrapper[4869]: I1001 15:40:39.631117 4869 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/76589bac-382f-430e-83f3-ff32e9634017-inventory\") on node \"crc\" DevicePath \"\"" Oct 01 15:40:39 crc kubenswrapper[4869]: I1001 15:40:39.631138 4869 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/76589bac-382f-430e-83f3-ff32e9634017-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 01 15:40:40 crc kubenswrapper[4869]: I1001 15:40:40.026165 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gthv5" Oct 01 15:40:40 crc kubenswrapper[4869]: I1001 15:40:40.026410 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gthv5" event={"ID":"76589bac-382f-430e-83f3-ff32e9634017","Type":"ContainerDied","Data":"b0c9c92c66f3900bba00f7b836de83ca81ef74458f9abb9d1c4d84ab81a3f215"} Oct 01 15:40:40 crc kubenswrapper[4869]: I1001 15:40:40.026549 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b0c9c92c66f3900bba00f7b836de83ca81ef74458f9abb9d1c4d84ab81a3f215" Oct 01 15:40:40 crc kubenswrapper[4869]: I1001 15:40:40.124973 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-64bv5"] Oct 01 15:40:40 crc kubenswrapper[4869]: E1001 15:40:40.125455 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76589bac-382f-430e-83f3-ff32e9634017" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Oct 01 15:40:40 crc kubenswrapper[4869]: I1001 15:40:40.125474 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="76589bac-382f-430e-83f3-ff32e9634017" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Oct 01 15:40:40 crc kubenswrapper[4869]: I1001 15:40:40.125660 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="76589bac-382f-430e-83f3-ff32e9634017" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Oct 01 15:40:40 crc kubenswrapper[4869]: I1001 15:40:40.126534 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-64bv5" Oct 01 15:40:40 crc kubenswrapper[4869]: I1001 15:40:40.133650 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 01 15:40:40 crc kubenswrapper[4869]: I1001 15:40:40.133715 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Oct 01 15:40:40 crc kubenswrapper[4869]: I1001 15:40:40.133772 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-cjg8g" Oct 01 15:40:40 crc kubenswrapper[4869]: I1001 15:40:40.133879 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 01 15:40:40 crc kubenswrapper[4869]: I1001 15:40:40.134031 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 01 15:40:40 crc kubenswrapper[4869]: I1001 15:40:40.136199 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-64bv5"] Oct 01 15:40:40 crc kubenswrapper[4869]: I1001 15:40:40.241945 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xl7vf\" (UniqueName: \"kubernetes.io/projected/9c034182-bd4b-4756-b834-c66984c690bf-kube-api-access-xl7vf\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-64bv5\" (UID: \"9c034182-bd4b-4756-b834-c66984c690bf\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-64bv5" Oct 01 15:40:40 crc kubenswrapper[4869]: I1001 15:40:40.242001 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9c034182-bd4b-4756-b834-c66984c690bf-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-64bv5\" (UID: \"9c034182-bd4b-4756-b834-c66984c690bf\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-64bv5" Oct 01 15:40:40 crc kubenswrapper[4869]: I1001 15:40:40.242097 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9c034182-bd4b-4756-b834-c66984c690bf-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-64bv5\" (UID: \"9c034182-bd4b-4756-b834-c66984c690bf\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-64bv5" Oct 01 15:40:40 crc kubenswrapper[4869]: I1001 15:40:40.242164 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/9c034182-bd4b-4756-b834-c66984c690bf-ceph\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-64bv5\" (UID: \"9c034182-bd4b-4756-b834-c66984c690bf\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-64bv5" Oct 01 15:40:40 crc kubenswrapper[4869]: I1001 15:40:40.344447 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9c034182-bd4b-4756-b834-c66984c690bf-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-64bv5\" (UID: \"9c034182-bd4b-4756-b834-c66984c690bf\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-64bv5" Oct 01 15:40:40 crc kubenswrapper[4869]: I1001 15:40:40.344597 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: 
\"kubernetes.io/secret/9c034182-bd4b-4756-b834-c66984c690bf-ceph\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-64bv5\" (UID: \"9c034182-bd4b-4756-b834-c66984c690bf\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-64bv5" Oct 01 15:40:40 crc kubenswrapper[4869]: I1001 15:40:40.344740 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xl7vf\" (UniqueName: \"kubernetes.io/projected/9c034182-bd4b-4756-b834-c66984c690bf-kube-api-access-xl7vf\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-64bv5\" (UID: \"9c034182-bd4b-4756-b834-c66984c690bf\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-64bv5" Oct 01 15:40:40 crc kubenswrapper[4869]: I1001 15:40:40.344785 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9c034182-bd4b-4756-b834-c66984c690bf-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-64bv5\" (UID: \"9c034182-bd4b-4756-b834-c66984c690bf\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-64bv5" Oct 01 15:40:40 crc kubenswrapper[4869]: I1001 15:40:40.355664 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9c034182-bd4b-4756-b834-c66984c690bf-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-64bv5\" (UID: \"9c034182-bd4b-4756-b834-c66984c690bf\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-64bv5" Oct 01 15:40:40 crc kubenswrapper[4869]: I1001 15:40:40.355926 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9c034182-bd4b-4756-b834-c66984c690bf-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-64bv5\" (UID: \"9c034182-bd4b-4756-b834-c66984c690bf\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-64bv5" Oct 01 15:40:40 crc kubenswrapper[4869]: I1001 15:40:40.356458 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/9c034182-bd4b-4756-b834-c66984c690bf-ceph\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-64bv5\" (UID: \"9c034182-bd4b-4756-b834-c66984c690bf\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-64bv5" Oct 01 15:40:40 crc kubenswrapper[4869]: I1001 15:40:40.376482 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xl7vf\" (UniqueName: \"kubernetes.io/projected/9c034182-bd4b-4756-b834-c66984c690bf-kube-api-access-xl7vf\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-64bv5\" (UID: \"9c034182-bd4b-4756-b834-c66984c690bf\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-64bv5" Oct 01 15:40:40 crc kubenswrapper[4869]: I1001 15:40:40.441993 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-64bv5" Oct 01 15:40:40 crc kubenswrapper[4869]: I1001 15:40:40.957116 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-64bv5"] Oct 01 15:40:41 crc kubenswrapper[4869]: I1001 15:40:41.034590 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-64bv5" event={"ID":"9c034182-bd4b-4756-b834-c66984c690bf","Type":"ContainerStarted","Data":"6db9a688b87d5790822b686f5e06f4e26625b639155ff6a522b5a0453d53604f"} Oct 01 15:40:43 crc kubenswrapper[4869]: I1001 15:40:43.051608 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-64bv5" event={"ID":"9c034182-bd4b-4756-b834-c66984c690bf","Type":"ContainerStarted","Data":"299c5cb65f71196c82fa5b81253db31f1af5ef8c9aff997fcf7d6e4fb2ad157f"} Oct 01 15:40:43 crc kubenswrapper[4869]: I1001 15:40:43.071231 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-64bv5" podStartSLOduration=1.9378958590000002 podStartE2EDuration="3.071211898s" podCreationTimestamp="2025-10-01 15:40:40 +0000 UTC" firstStartedPulling="2025-10-01 15:40:40.962288059 +0000 UTC m=+2150.109131195" lastFinishedPulling="2025-10-01 15:40:42.095604108 +0000 UTC m=+2151.242447234" observedRunningTime="2025-10-01 15:40:43.069610158 +0000 UTC m=+2152.216453314" watchObservedRunningTime="2025-10-01 15:40:43.071211898 +0000 UTC m=+2152.218055024" Oct 01 15:40:54 crc kubenswrapper[4869]: I1001 15:40:54.582864 4869 scope.go:117] "RemoveContainer" containerID="850fa9657d55ab61ed48a042dba1eb165240f62a294e8f87d32c9a3206247a54" Oct 01 15:40:54 crc kubenswrapper[4869]: E1001 15:40:54.584077 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 15:41:05 crc kubenswrapper[4869]: I1001 15:41:05.581386 4869 scope.go:117] "RemoveContainer" containerID="850fa9657d55ab61ed48a042dba1eb165240f62a294e8f87d32c9a3206247a54" Oct 01 15:41:05 crc kubenswrapper[4869]: E1001 15:41:05.582383 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 15:41:18 crc kubenswrapper[4869]: I1001 15:41:18.380001 4869 generic.go:334] "Generic (PLEG): container finished" podID="9c034182-bd4b-4756-b834-c66984c690bf" containerID="299c5cb65f71196c82fa5b81253db31f1af5ef8c9aff997fcf7d6e4fb2ad157f" exitCode=0 Oct 01 15:41:18 crc kubenswrapper[4869]: I1001 15:41:18.380098 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-64bv5" event={"ID":"9c034182-bd4b-4756-b834-c66984c690bf","Type":"ContainerDied","Data":"299c5cb65f71196c82fa5b81253db31f1af5ef8c9aff997fcf7d6e4fb2ad157f"} 
Oct 01 15:41:19 crc kubenswrapper[4869]: I1001 15:41:19.768200 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-64bv5" Oct 01 15:41:19 crc kubenswrapper[4869]: I1001 15:41:19.921848 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9c034182-bd4b-4756-b834-c66984c690bf-ssh-key\") pod \"9c034182-bd4b-4756-b834-c66984c690bf\" (UID: \"9c034182-bd4b-4756-b834-c66984c690bf\") " Oct 01 15:41:19 crc kubenswrapper[4869]: I1001 15:41:19.921967 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/9c034182-bd4b-4756-b834-c66984c690bf-ceph\") pod \"9c034182-bd4b-4756-b834-c66984c690bf\" (UID: \"9c034182-bd4b-4756-b834-c66984c690bf\") " Oct 01 15:41:19 crc kubenswrapper[4869]: I1001 15:41:19.922013 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9c034182-bd4b-4756-b834-c66984c690bf-inventory\") pod \"9c034182-bd4b-4756-b834-c66984c690bf\" (UID: \"9c034182-bd4b-4756-b834-c66984c690bf\") " Oct 01 15:41:19 crc kubenswrapper[4869]: I1001 15:41:19.922045 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xl7vf\" (UniqueName: \"kubernetes.io/projected/9c034182-bd4b-4756-b834-c66984c690bf-kube-api-access-xl7vf\") pod \"9c034182-bd4b-4756-b834-c66984c690bf\" (UID: \"9c034182-bd4b-4756-b834-c66984c690bf\") " Oct 01 15:41:19 crc kubenswrapper[4869]: I1001 15:41:19.927556 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9c034182-bd4b-4756-b834-c66984c690bf-ceph" (OuterVolumeSpecName: "ceph") pod "9c034182-bd4b-4756-b834-c66984c690bf" (UID: "9c034182-bd4b-4756-b834-c66984c690bf"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:41:19 crc kubenswrapper[4869]: I1001 15:41:19.927803 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9c034182-bd4b-4756-b834-c66984c690bf-kube-api-access-xl7vf" (OuterVolumeSpecName: "kube-api-access-xl7vf") pod "9c034182-bd4b-4756-b834-c66984c690bf" (UID: "9c034182-bd4b-4756-b834-c66984c690bf"). InnerVolumeSpecName "kube-api-access-xl7vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:41:19 crc kubenswrapper[4869]: I1001 15:41:19.947979 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9c034182-bd4b-4756-b834-c66984c690bf-inventory" (OuterVolumeSpecName: "inventory") pod "9c034182-bd4b-4756-b834-c66984c690bf" (UID: "9c034182-bd4b-4756-b834-c66984c690bf"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:41:19 crc kubenswrapper[4869]: I1001 15:41:19.953873 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9c034182-bd4b-4756-b834-c66984c690bf-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "9c034182-bd4b-4756-b834-c66984c690bf" (UID: "9c034182-bd4b-4756-b834-c66984c690bf"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:41:20 crc kubenswrapper[4869]: I1001 15:41:20.023989 4869 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9c034182-bd4b-4756-b834-c66984c690bf-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 01 15:41:20 crc kubenswrapper[4869]: I1001 15:41:20.024226 4869 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/9c034182-bd4b-4756-b834-c66984c690bf-ceph\") on node \"crc\" DevicePath \"\"" Oct 01 15:41:20 crc kubenswrapper[4869]: I1001 15:41:20.024357 4869 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9c034182-bd4b-4756-b834-c66984c690bf-inventory\") on node \"crc\" DevicePath \"\"" Oct 01 15:41:20 crc kubenswrapper[4869]: I1001 15:41:20.024441 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xl7vf\" (UniqueName: \"kubernetes.io/projected/9c034182-bd4b-4756-b834-c66984c690bf-kube-api-access-xl7vf\") on node \"crc\" DevicePath \"\"" Oct 01 15:41:20 crc kubenswrapper[4869]: I1001 15:41:20.399663 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-64bv5" event={"ID":"9c034182-bd4b-4756-b834-c66984c690bf","Type":"ContainerDied","Data":"6db9a688b87d5790822b686f5e06f4e26625b639155ff6a522b5a0453d53604f"} Oct 01 15:41:20 crc kubenswrapper[4869]: I1001 15:41:20.399896 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6db9a688b87d5790822b686f5e06f4e26625b639155ff6a522b5a0453d53604f" Oct 01 15:41:20 crc kubenswrapper[4869]: I1001 15:41:20.399917 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-64bv5" Oct 01 15:41:20 crc kubenswrapper[4869]: I1001 15:41:20.484599 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-jwnpq"] Oct 01 15:41:20 crc kubenswrapper[4869]: E1001 15:41:20.484940 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c034182-bd4b-4756-b834-c66984c690bf" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Oct 01 15:41:20 crc kubenswrapper[4869]: I1001 15:41:20.484962 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c034182-bd4b-4756-b834-c66984c690bf" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Oct 01 15:41:20 crc kubenswrapper[4869]: I1001 15:41:20.485141 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="9c034182-bd4b-4756-b834-c66984c690bf" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Oct 01 15:41:20 crc kubenswrapper[4869]: I1001 15:41:20.485740 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-jwnpq" Oct 01 15:41:20 crc kubenswrapper[4869]: I1001 15:41:20.487830 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 01 15:41:20 crc kubenswrapper[4869]: I1001 15:41:20.488221 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-cjg8g" Oct 01 15:41:20 crc kubenswrapper[4869]: I1001 15:41:20.488382 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 01 15:41:20 crc kubenswrapper[4869]: I1001 15:41:20.488544 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Oct 01 15:41:20 crc kubenswrapper[4869]: I1001 15:41:20.488597 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 01 15:41:20 crc kubenswrapper[4869]: I1001 15:41:20.535771 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-jwnpq"] Oct 01 15:41:20 crc kubenswrapper[4869]: I1001 15:41:20.580753 4869 scope.go:117] "RemoveContainer" containerID="850fa9657d55ab61ed48a042dba1eb165240f62a294e8f87d32c9a3206247a54" Oct 01 15:41:20 crc kubenswrapper[4869]: E1001 15:41:20.581357 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 15:41:20 crc kubenswrapper[4869]: I1001 15:41:20.634541 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dtp7t\" (UniqueName: \"kubernetes.io/projected/b062c1f1-4b36-476f-8c2e-ca3b1a7e709c-kube-api-access-dtp7t\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-jwnpq\" (UID: \"b062c1f1-4b36-476f-8c2e-ca3b1a7e709c\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-jwnpq" Oct 01 15:41:20 crc kubenswrapper[4869]: I1001 15:41:20.634658 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b062c1f1-4b36-476f-8c2e-ca3b1a7e709c-ssh-key\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-jwnpq\" (UID: \"b062c1f1-4b36-476f-8c2e-ca3b1a7e709c\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-jwnpq" Oct 01 15:41:20 crc kubenswrapper[4869]: I1001 15:41:20.634804 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/b062c1f1-4b36-476f-8c2e-ca3b1a7e709c-ceph\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-jwnpq\" (UID: \"b062c1f1-4b36-476f-8c2e-ca3b1a7e709c\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-jwnpq" Oct 01 15:41:20 crc kubenswrapper[4869]: I1001 15:41:20.634848 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b062c1f1-4b36-476f-8c2e-ca3b1a7e709c-inventory\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-jwnpq\" (UID: 
\"b062c1f1-4b36-476f-8c2e-ca3b1a7e709c\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-jwnpq" Oct 01 15:41:20 crc kubenswrapper[4869]: I1001 15:41:20.736838 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b062c1f1-4b36-476f-8c2e-ca3b1a7e709c-ssh-key\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-jwnpq\" (UID: \"b062c1f1-4b36-476f-8c2e-ca3b1a7e709c\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-jwnpq" Oct 01 15:41:20 crc kubenswrapper[4869]: I1001 15:41:20.737292 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/b062c1f1-4b36-476f-8c2e-ca3b1a7e709c-ceph\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-jwnpq\" (UID: \"b062c1f1-4b36-476f-8c2e-ca3b1a7e709c\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-jwnpq" Oct 01 15:41:20 crc kubenswrapper[4869]: I1001 15:41:20.737336 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b062c1f1-4b36-476f-8c2e-ca3b1a7e709c-inventory\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-jwnpq\" (UID: \"b062c1f1-4b36-476f-8c2e-ca3b1a7e709c\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-jwnpq" Oct 01 15:41:20 crc kubenswrapper[4869]: I1001 15:41:20.737393 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dtp7t\" (UniqueName: \"kubernetes.io/projected/b062c1f1-4b36-476f-8c2e-ca3b1a7e709c-kube-api-access-dtp7t\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-jwnpq\" (UID: \"b062c1f1-4b36-476f-8c2e-ca3b1a7e709c\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-jwnpq" Oct 01 15:41:20 crc kubenswrapper[4869]: I1001 15:41:20.742679 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b062c1f1-4b36-476f-8c2e-ca3b1a7e709c-ssh-key\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-jwnpq\" (UID: \"b062c1f1-4b36-476f-8c2e-ca3b1a7e709c\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-jwnpq" Oct 01 15:41:20 crc kubenswrapper[4869]: I1001 15:41:20.744230 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/b062c1f1-4b36-476f-8c2e-ca3b1a7e709c-ceph\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-jwnpq\" (UID: \"b062c1f1-4b36-476f-8c2e-ca3b1a7e709c\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-jwnpq" Oct 01 15:41:20 crc kubenswrapper[4869]: I1001 15:41:20.745888 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b062c1f1-4b36-476f-8c2e-ca3b1a7e709c-inventory\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-jwnpq\" (UID: \"b062c1f1-4b36-476f-8c2e-ca3b1a7e709c\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-jwnpq" Oct 01 15:41:20 crc kubenswrapper[4869]: I1001 15:41:20.755206 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dtp7t\" (UniqueName: \"kubernetes.io/projected/b062c1f1-4b36-476f-8c2e-ca3b1a7e709c-kube-api-access-dtp7t\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-jwnpq\" (UID: \"b062c1f1-4b36-476f-8c2e-ca3b1a7e709c\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-jwnpq" Oct 01 15:41:20 crc 
kubenswrapper[4869]: I1001 15:41:20.800177 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-jwnpq" Oct 01 15:41:21 crc kubenswrapper[4869]: I1001 15:41:21.290944 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-jwnpq"] Oct 01 15:41:21 crc kubenswrapper[4869]: I1001 15:41:21.407968 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-jwnpq" event={"ID":"b062c1f1-4b36-476f-8c2e-ca3b1a7e709c","Type":"ContainerStarted","Data":"c1bf629fe3f71b2002484f61f3e206f43252296a520d2448db34be8b2eef2bee"} Oct 01 15:41:22 crc kubenswrapper[4869]: I1001 15:41:22.416833 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-jwnpq" event={"ID":"b062c1f1-4b36-476f-8c2e-ca3b1a7e709c","Type":"ContainerStarted","Data":"214e028eae54eb90218d3da742d99d32eba9444ac27564641f9d49ca92bbb60a"} Oct 01 15:41:22 crc kubenswrapper[4869]: I1001 15:41:22.451976 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-jwnpq" podStartSLOduration=1.863966013 podStartE2EDuration="2.451918418s" podCreationTimestamp="2025-10-01 15:41:20 +0000 UTC" firstStartedPulling="2025-10-01 15:41:21.299044107 +0000 UTC m=+2190.445887223" lastFinishedPulling="2025-10-01 15:41:21.886996502 +0000 UTC m=+2191.033839628" observedRunningTime="2025-10-01 15:41:22.441760703 +0000 UTC m=+2191.588603859" watchObservedRunningTime="2025-10-01 15:41:22.451918418 +0000 UTC m=+2191.598761564" Oct 01 15:41:26 crc kubenswrapper[4869]: I1001 15:41:26.460435 4869 generic.go:334] "Generic (PLEG): container finished" podID="b062c1f1-4b36-476f-8c2e-ca3b1a7e709c" containerID="214e028eae54eb90218d3da742d99d32eba9444ac27564641f9d49ca92bbb60a" exitCode=0 Oct 01 15:41:26 crc kubenswrapper[4869]: I1001 15:41:26.460539 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-jwnpq" event={"ID":"b062c1f1-4b36-476f-8c2e-ca3b1a7e709c","Type":"ContainerDied","Data":"214e028eae54eb90218d3da742d99d32eba9444ac27564641f9d49ca92bbb60a"} Oct 01 15:41:27 crc kubenswrapper[4869]: I1001 15:41:27.923241 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-jwnpq" Oct 01 15:41:27 crc kubenswrapper[4869]: I1001 15:41:27.981669 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dtp7t\" (UniqueName: \"kubernetes.io/projected/b062c1f1-4b36-476f-8c2e-ca3b1a7e709c-kube-api-access-dtp7t\") pod \"b062c1f1-4b36-476f-8c2e-ca3b1a7e709c\" (UID: \"b062c1f1-4b36-476f-8c2e-ca3b1a7e709c\") " Oct 01 15:41:27 crc kubenswrapper[4869]: I1001 15:41:27.981782 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/b062c1f1-4b36-476f-8c2e-ca3b1a7e709c-ceph\") pod \"b062c1f1-4b36-476f-8c2e-ca3b1a7e709c\" (UID: \"b062c1f1-4b36-476f-8c2e-ca3b1a7e709c\") " Oct 01 15:41:27 crc kubenswrapper[4869]: I1001 15:41:27.981884 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b062c1f1-4b36-476f-8c2e-ca3b1a7e709c-inventory\") pod \"b062c1f1-4b36-476f-8c2e-ca3b1a7e709c\" (UID: \"b062c1f1-4b36-476f-8c2e-ca3b1a7e709c\") " Oct 01 15:41:27 crc kubenswrapper[4869]: I1001 15:41:27.981961 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b062c1f1-4b36-476f-8c2e-ca3b1a7e709c-ssh-key\") pod \"b062c1f1-4b36-476f-8c2e-ca3b1a7e709c\" (UID: \"b062c1f1-4b36-476f-8c2e-ca3b1a7e709c\") " Oct 01 15:41:27 crc kubenswrapper[4869]: I1001 15:41:27.988031 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b062c1f1-4b36-476f-8c2e-ca3b1a7e709c-ceph" (OuterVolumeSpecName: "ceph") pod "b062c1f1-4b36-476f-8c2e-ca3b1a7e709c" (UID: "b062c1f1-4b36-476f-8c2e-ca3b1a7e709c"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:41:27 crc kubenswrapper[4869]: I1001 15:41:27.989017 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b062c1f1-4b36-476f-8c2e-ca3b1a7e709c-kube-api-access-dtp7t" (OuterVolumeSpecName: "kube-api-access-dtp7t") pod "b062c1f1-4b36-476f-8c2e-ca3b1a7e709c" (UID: "b062c1f1-4b36-476f-8c2e-ca3b1a7e709c"). InnerVolumeSpecName "kube-api-access-dtp7t". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:41:28 crc kubenswrapper[4869]: I1001 15:41:28.007756 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b062c1f1-4b36-476f-8c2e-ca3b1a7e709c-inventory" (OuterVolumeSpecName: "inventory") pod "b062c1f1-4b36-476f-8c2e-ca3b1a7e709c" (UID: "b062c1f1-4b36-476f-8c2e-ca3b1a7e709c"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:41:28 crc kubenswrapper[4869]: I1001 15:41:28.009548 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b062c1f1-4b36-476f-8c2e-ca3b1a7e709c-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "b062c1f1-4b36-476f-8c2e-ca3b1a7e709c" (UID: "b062c1f1-4b36-476f-8c2e-ca3b1a7e709c"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:41:28 crc kubenswrapper[4869]: I1001 15:41:28.084384 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dtp7t\" (UniqueName: \"kubernetes.io/projected/b062c1f1-4b36-476f-8c2e-ca3b1a7e709c-kube-api-access-dtp7t\") on node \"crc\" DevicePath \"\"" Oct 01 15:41:28 crc kubenswrapper[4869]: I1001 15:41:28.084683 4869 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/b062c1f1-4b36-476f-8c2e-ca3b1a7e709c-ceph\") on node \"crc\" DevicePath \"\"" Oct 01 15:41:28 crc kubenswrapper[4869]: I1001 15:41:28.084799 4869 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b062c1f1-4b36-476f-8c2e-ca3b1a7e709c-inventory\") on node \"crc\" DevicePath \"\"" Oct 01 15:41:28 crc kubenswrapper[4869]: I1001 15:41:28.084901 4869 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b062c1f1-4b36-476f-8c2e-ca3b1a7e709c-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 01 15:41:28 crc kubenswrapper[4869]: I1001 15:41:28.482399 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-jwnpq" event={"ID":"b062c1f1-4b36-476f-8c2e-ca3b1a7e709c","Type":"ContainerDied","Data":"c1bf629fe3f71b2002484f61f3e206f43252296a520d2448db34be8b2eef2bee"} Oct 01 15:41:28 crc kubenswrapper[4869]: I1001 15:41:28.482450 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-jwnpq" Oct 01 15:41:28 crc kubenswrapper[4869]: I1001 15:41:28.482457 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c1bf629fe3f71b2002484f61f3e206f43252296a520d2448db34be8b2eef2bee" Oct 01 15:41:28 crc kubenswrapper[4869]: I1001 15:41:28.587112 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-cjbfj"] Oct 01 15:41:28 crc kubenswrapper[4869]: E1001 15:41:28.587537 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b062c1f1-4b36-476f-8c2e-ca3b1a7e709c" containerName="ceph-hci-pre-edpm-deployment-openstack-edpm-ipam" Oct 01 15:41:28 crc kubenswrapper[4869]: I1001 15:41:28.587557 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="b062c1f1-4b36-476f-8c2e-ca3b1a7e709c" containerName="ceph-hci-pre-edpm-deployment-openstack-edpm-ipam" Oct 01 15:41:28 crc kubenswrapper[4869]: I1001 15:41:28.587734 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="b062c1f1-4b36-476f-8c2e-ca3b1a7e709c" containerName="ceph-hci-pre-edpm-deployment-openstack-edpm-ipam" Oct 01 15:41:28 crc kubenswrapper[4869]: I1001 15:41:28.588364 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-cjbfj" Oct 01 15:41:28 crc kubenswrapper[4869]: I1001 15:41:28.590711 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Oct 01 15:41:28 crc kubenswrapper[4869]: I1001 15:41:28.590897 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-cjg8g" Oct 01 15:41:28 crc kubenswrapper[4869]: I1001 15:41:28.591013 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 01 15:41:28 crc kubenswrapper[4869]: I1001 15:41:28.591138 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 01 15:41:28 crc kubenswrapper[4869]: I1001 15:41:28.592839 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 01 15:41:28 crc kubenswrapper[4869]: I1001 15:41:28.603363 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-cjbfj"] Oct 01 15:41:28 crc kubenswrapper[4869]: I1001 15:41:28.696401 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9fb1424f-221b-46db-98b8-71a60daace2d-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-cjbfj\" (UID: \"9fb1424f-221b-46db-98b8-71a60daace2d\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-cjbfj" Oct 01 15:41:28 crc kubenswrapper[4869]: I1001 15:41:28.696457 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c4b2z\" (UniqueName: \"kubernetes.io/projected/9fb1424f-221b-46db-98b8-71a60daace2d-kube-api-access-c4b2z\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-cjbfj\" (UID: \"9fb1424f-221b-46db-98b8-71a60daace2d\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-cjbfj" Oct 01 15:41:28 crc kubenswrapper[4869]: I1001 15:41:28.696686 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9fb1424f-221b-46db-98b8-71a60daace2d-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-cjbfj\" (UID: \"9fb1424f-221b-46db-98b8-71a60daace2d\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-cjbfj" Oct 01 15:41:28 crc kubenswrapper[4869]: I1001 15:41:28.696866 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/9fb1424f-221b-46db-98b8-71a60daace2d-ceph\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-cjbfj\" (UID: \"9fb1424f-221b-46db-98b8-71a60daace2d\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-cjbfj" Oct 01 15:41:28 crc kubenswrapper[4869]: I1001 15:41:28.798402 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9fb1424f-221b-46db-98b8-71a60daace2d-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-cjbfj\" (UID: \"9fb1424f-221b-46db-98b8-71a60daace2d\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-cjbfj" Oct 01 15:41:28 crc kubenswrapper[4869]: I1001 15:41:28.798459 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-c4b2z\" (UniqueName: \"kubernetes.io/projected/9fb1424f-221b-46db-98b8-71a60daace2d-kube-api-access-c4b2z\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-cjbfj\" (UID: \"9fb1424f-221b-46db-98b8-71a60daace2d\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-cjbfj" Oct 01 15:41:28 crc kubenswrapper[4869]: I1001 15:41:28.798554 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9fb1424f-221b-46db-98b8-71a60daace2d-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-cjbfj\" (UID: \"9fb1424f-221b-46db-98b8-71a60daace2d\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-cjbfj" Oct 01 15:41:28 crc kubenswrapper[4869]: I1001 15:41:28.798590 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/9fb1424f-221b-46db-98b8-71a60daace2d-ceph\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-cjbfj\" (UID: \"9fb1424f-221b-46db-98b8-71a60daace2d\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-cjbfj" Oct 01 15:41:28 crc kubenswrapper[4869]: I1001 15:41:28.805665 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9fb1424f-221b-46db-98b8-71a60daace2d-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-cjbfj\" (UID: \"9fb1424f-221b-46db-98b8-71a60daace2d\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-cjbfj" Oct 01 15:41:28 crc kubenswrapper[4869]: I1001 15:41:28.805733 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/9fb1424f-221b-46db-98b8-71a60daace2d-ceph\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-cjbfj\" (UID: \"9fb1424f-221b-46db-98b8-71a60daace2d\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-cjbfj" Oct 01 15:41:28 crc kubenswrapper[4869]: I1001 15:41:28.805825 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9fb1424f-221b-46db-98b8-71a60daace2d-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-cjbfj\" (UID: \"9fb1424f-221b-46db-98b8-71a60daace2d\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-cjbfj" Oct 01 15:41:28 crc kubenswrapper[4869]: I1001 15:41:28.819718 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c4b2z\" (UniqueName: \"kubernetes.io/projected/9fb1424f-221b-46db-98b8-71a60daace2d-kube-api-access-c4b2z\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-cjbfj\" (UID: \"9fb1424f-221b-46db-98b8-71a60daace2d\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-cjbfj" Oct 01 15:41:28 crc kubenswrapper[4869]: I1001 15:41:28.913560 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-cjbfj" Oct 01 15:41:29 crc kubenswrapper[4869]: I1001 15:41:29.294966 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-cjbfj"] Oct 01 15:41:29 crc kubenswrapper[4869]: I1001 15:41:29.495544 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-cjbfj" event={"ID":"9fb1424f-221b-46db-98b8-71a60daace2d","Type":"ContainerStarted","Data":"a69567bbb2ed66ad2680fe9303852b79c061893e9ad85b575a0566cf35208051"} Oct 01 15:41:30 crc kubenswrapper[4869]: I1001 15:41:30.503410 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-cjbfj" event={"ID":"9fb1424f-221b-46db-98b8-71a60daace2d","Type":"ContainerStarted","Data":"7c8ca1b333a5b461390fe6c3840f952517370649f4e98bcb4dab863fa6edcb12"} Oct 01 15:41:30 crc kubenswrapper[4869]: I1001 15:41:30.521232 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-cjbfj" podStartSLOduration=2.005815873 podStartE2EDuration="2.521206352s" podCreationTimestamp="2025-10-01 15:41:28 +0000 UTC" firstStartedPulling="2025-10-01 15:41:29.297849708 +0000 UTC m=+2198.444692864" lastFinishedPulling="2025-10-01 15:41:29.813240187 +0000 UTC m=+2198.960083343" observedRunningTime="2025-10-01 15:41:30.519634543 +0000 UTC m=+2199.666477699" watchObservedRunningTime="2025-10-01 15:41:30.521206352 +0000 UTC m=+2199.668049488" Oct 01 15:41:34 crc kubenswrapper[4869]: I1001 15:41:34.581288 4869 scope.go:117] "RemoveContainer" containerID="850fa9657d55ab61ed48a042dba1eb165240f62a294e8f87d32c9a3206247a54" Oct 01 15:41:34 crc kubenswrapper[4869]: E1001 15:41:34.582236 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 15:41:47 crc kubenswrapper[4869]: I1001 15:41:47.581587 4869 scope.go:117] "RemoveContainer" containerID="850fa9657d55ab61ed48a042dba1eb165240f62a294e8f87d32c9a3206247a54" Oct 01 15:41:47 crc kubenswrapper[4869]: E1001 15:41:47.582426 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 15:41:59 crc kubenswrapper[4869]: I1001 15:41:59.581608 4869 scope.go:117] "RemoveContainer" containerID="850fa9657d55ab61ed48a042dba1eb165240f62a294e8f87d32c9a3206247a54" Oct 01 15:41:59 crc kubenswrapper[4869]: E1001 15:41:59.583206 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 15:42:11 crc kubenswrapper[4869]: I1001 15:42:11.585295 4869 scope.go:117] "RemoveContainer" containerID="850fa9657d55ab61ed48a042dba1eb165240f62a294e8f87d32c9a3206247a54" Oct 01 15:42:11 crc kubenswrapper[4869]: E1001 15:42:11.586949 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 15:42:13 crc kubenswrapper[4869]: I1001 15:42:13.907783 4869 generic.go:334] "Generic (PLEG): container finished" podID="9fb1424f-221b-46db-98b8-71a60daace2d" containerID="7c8ca1b333a5b461390fe6c3840f952517370649f4e98bcb4dab863fa6edcb12" exitCode=0 Oct 01 15:42:13 crc kubenswrapper[4869]: I1001 15:42:13.908074 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-cjbfj" event={"ID":"9fb1424f-221b-46db-98b8-71a60daace2d","Type":"ContainerDied","Data":"7c8ca1b333a5b461390fe6c3840f952517370649f4e98bcb4dab863fa6edcb12"} Oct 01 15:42:15 crc kubenswrapper[4869]: I1001 15:42:15.346756 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-cjbfj" Oct 01 15:42:15 crc kubenswrapper[4869]: I1001 15:42:15.476020 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9fb1424f-221b-46db-98b8-71a60daace2d-inventory\") pod \"9fb1424f-221b-46db-98b8-71a60daace2d\" (UID: \"9fb1424f-221b-46db-98b8-71a60daace2d\") " Oct 01 15:42:15 crc kubenswrapper[4869]: I1001 15:42:15.476134 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9fb1424f-221b-46db-98b8-71a60daace2d-ssh-key\") pod \"9fb1424f-221b-46db-98b8-71a60daace2d\" (UID: \"9fb1424f-221b-46db-98b8-71a60daace2d\") " Oct 01 15:42:15 crc kubenswrapper[4869]: I1001 15:42:15.476194 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/9fb1424f-221b-46db-98b8-71a60daace2d-ceph\") pod \"9fb1424f-221b-46db-98b8-71a60daace2d\" (UID: \"9fb1424f-221b-46db-98b8-71a60daace2d\") " Oct 01 15:42:15 crc kubenswrapper[4869]: I1001 15:42:15.476339 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c4b2z\" (UniqueName: \"kubernetes.io/projected/9fb1424f-221b-46db-98b8-71a60daace2d-kube-api-access-c4b2z\") pod \"9fb1424f-221b-46db-98b8-71a60daace2d\" (UID: \"9fb1424f-221b-46db-98b8-71a60daace2d\") " Oct 01 15:42:15 crc kubenswrapper[4869]: I1001 15:42:15.482158 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9fb1424f-221b-46db-98b8-71a60daace2d-ceph" (OuterVolumeSpecName: "ceph") pod "9fb1424f-221b-46db-98b8-71a60daace2d" (UID: "9fb1424f-221b-46db-98b8-71a60daace2d"). InnerVolumeSpecName "ceph". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:42:15 crc kubenswrapper[4869]: I1001 15:42:15.482184 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9fb1424f-221b-46db-98b8-71a60daace2d-kube-api-access-c4b2z" (OuterVolumeSpecName: "kube-api-access-c4b2z") pod "9fb1424f-221b-46db-98b8-71a60daace2d" (UID: "9fb1424f-221b-46db-98b8-71a60daace2d"). InnerVolumeSpecName "kube-api-access-c4b2z". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:42:15 crc kubenswrapper[4869]: I1001 15:42:15.502122 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9fb1424f-221b-46db-98b8-71a60daace2d-inventory" (OuterVolumeSpecName: "inventory") pod "9fb1424f-221b-46db-98b8-71a60daace2d" (UID: "9fb1424f-221b-46db-98b8-71a60daace2d"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:42:15 crc kubenswrapper[4869]: I1001 15:42:15.502166 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9fb1424f-221b-46db-98b8-71a60daace2d-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "9fb1424f-221b-46db-98b8-71a60daace2d" (UID: "9fb1424f-221b-46db-98b8-71a60daace2d"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:42:15 crc kubenswrapper[4869]: I1001 15:42:15.579139 4869 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9fb1424f-221b-46db-98b8-71a60daace2d-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 01 15:42:15 crc kubenswrapper[4869]: I1001 15:42:15.579590 4869 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/9fb1424f-221b-46db-98b8-71a60daace2d-ceph\") on node \"crc\" DevicePath \"\"" Oct 01 15:42:15 crc kubenswrapper[4869]: I1001 15:42:15.579614 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c4b2z\" (UniqueName: \"kubernetes.io/projected/9fb1424f-221b-46db-98b8-71a60daace2d-kube-api-access-c4b2z\") on node \"crc\" DevicePath \"\"" Oct 01 15:42:15 crc kubenswrapper[4869]: I1001 15:42:15.579636 4869 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9fb1424f-221b-46db-98b8-71a60daace2d-inventory\") on node \"crc\" DevicePath \"\"" Oct 01 15:42:15 crc kubenswrapper[4869]: I1001 15:42:15.930851 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-cjbfj" event={"ID":"9fb1424f-221b-46db-98b8-71a60daace2d","Type":"ContainerDied","Data":"a69567bbb2ed66ad2680fe9303852b79c061893e9ad85b575a0566cf35208051"} Oct 01 15:42:15 crc kubenswrapper[4869]: I1001 15:42:15.931088 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a69567bbb2ed66ad2680fe9303852b79c061893e9ad85b575a0566cf35208051" Oct 01 15:42:15 crc kubenswrapper[4869]: I1001 15:42:15.930923 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-cjbfj" Oct 01 15:42:16 crc kubenswrapper[4869]: I1001 15:42:16.046115 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-jtzdt"] Oct 01 15:42:16 crc kubenswrapper[4869]: E1001 15:42:16.046580 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9fb1424f-221b-46db-98b8-71a60daace2d" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Oct 01 15:42:16 crc kubenswrapper[4869]: I1001 15:42:16.046599 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="9fb1424f-221b-46db-98b8-71a60daace2d" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Oct 01 15:42:16 crc kubenswrapper[4869]: I1001 15:42:16.046784 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="9fb1424f-221b-46db-98b8-71a60daace2d" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Oct 01 15:42:16 crc kubenswrapper[4869]: I1001 15:42:16.048994 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-jtzdt" Oct 01 15:42:16 crc kubenswrapper[4869]: I1001 15:42:16.051453 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 01 15:42:16 crc kubenswrapper[4869]: I1001 15:42:16.051660 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-cjg8g" Oct 01 15:42:16 crc kubenswrapper[4869]: I1001 15:42:16.051757 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 01 15:42:16 crc kubenswrapper[4869]: I1001 15:42:16.051856 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Oct 01 15:42:16 crc kubenswrapper[4869]: I1001 15:42:16.053895 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-jtzdt"] Oct 01 15:42:16 crc kubenswrapper[4869]: I1001 15:42:16.058363 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 01 15:42:16 crc kubenswrapper[4869]: I1001 15:42:16.088828 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/66ab3eff-a1be-4cee-b460-39ac1d384491-ceph\") pod \"ssh-known-hosts-edpm-deployment-jtzdt\" (UID: \"66ab3eff-a1be-4cee-b460-39ac1d384491\") " pod="openstack/ssh-known-hosts-edpm-deployment-jtzdt" Oct 01 15:42:16 crc kubenswrapper[4869]: I1001 15:42:16.088864 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/66ab3eff-a1be-4cee-b460-39ac1d384491-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-jtzdt\" (UID: \"66ab3eff-a1be-4cee-b460-39ac1d384491\") " pod="openstack/ssh-known-hosts-edpm-deployment-jtzdt" Oct 01 15:42:16 crc kubenswrapper[4869]: I1001 15:42:16.088954 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t887r\" (UniqueName: \"kubernetes.io/projected/66ab3eff-a1be-4cee-b460-39ac1d384491-kube-api-access-t887r\") pod \"ssh-known-hosts-edpm-deployment-jtzdt\" (UID: \"66ab3eff-a1be-4cee-b460-39ac1d384491\") " pod="openstack/ssh-known-hosts-edpm-deployment-jtzdt" Oct 01 15:42:16 crc kubenswrapper[4869]: I1001 15:42:16.089022 4869 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/66ab3eff-a1be-4cee-b460-39ac1d384491-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-jtzdt\" (UID: \"66ab3eff-a1be-4cee-b460-39ac1d384491\") " pod="openstack/ssh-known-hosts-edpm-deployment-jtzdt" Oct 01 15:42:16 crc kubenswrapper[4869]: I1001 15:42:16.189717 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/66ab3eff-a1be-4cee-b460-39ac1d384491-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-jtzdt\" (UID: \"66ab3eff-a1be-4cee-b460-39ac1d384491\") " pod="openstack/ssh-known-hosts-edpm-deployment-jtzdt" Oct 01 15:42:16 crc kubenswrapper[4869]: I1001 15:42:16.189864 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/66ab3eff-a1be-4cee-b460-39ac1d384491-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-jtzdt\" (UID: \"66ab3eff-a1be-4cee-b460-39ac1d384491\") " pod="openstack/ssh-known-hosts-edpm-deployment-jtzdt" Oct 01 15:42:16 crc kubenswrapper[4869]: I1001 15:42:16.189885 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/66ab3eff-a1be-4cee-b460-39ac1d384491-ceph\") pod \"ssh-known-hosts-edpm-deployment-jtzdt\" (UID: \"66ab3eff-a1be-4cee-b460-39ac1d384491\") " pod="openstack/ssh-known-hosts-edpm-deployment-jtzdt" Oct 01 15:42:16 crc kubenswrapper[4869]: I1001 15:42:16.189923 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t887r\" (UniqueName: \"kubernetes.io/projected/66ab3eff-a1be-4cee-b460-39ac1d384491-kube-api-access-t887r\") pod \"ssh-known-hosts-edpm-deployment-jtzdt\" (UID: \"66ab3eff-a1be-4cee-b460-39ac1d384491\") " pod="openstack/ssh-known-hosts-edpm-deployment-jtzdt" Oct 01 15:42:16 crc kubenswrapper[4869]: I1001 15:42:16.193482 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/66ab3eff-a1be-4cee-b460-39ac1d384491-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-jtzdt\" (UID: \"66ab3eff-a1be-4cee-b460-39ac1d384491\") " pod="openstack/ssh-known-hosts-edpm-deployment-jtzdt" Oct 01 15:42:16 crc kubenswrapper[4869]: I1001 15:42:16.195309 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/66ab3eff-a1be-4cee-b460-39ac1d384491-ceph\") pod \"ssh-known-hosts-edpm-deployment-jtzdt\" (UID: \"66ab3eff-a1be-4cee-b460-39ac1d384491\") " pod="openstack/ssh-known-hosts-edpm-deployment-jtzdt" Oct 01 15:42:16 crc kubenswrapper[4869]: I1001 15:42:16.199821 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/66ab3eff-a1be-4cee-b460-39ac1d384491-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-jtzdt\" (UID: \"66ab3eff-a1be-4cee-b460-39ac1d384491\") " pod="openstack/ssh-known-hosts-edpm-deployment-jtzdt" Oct 01 15:42:16 crc kubenswrapper[4869]: I1001 15:42:16.213624 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t887r\" (UniqueName: \"kubernetes.io/projected/66ab3eff-a1be-4cee-b460-39ac1d384491-kube-api-access-t887r\") pod \"ssh-known-hosts-edpm-deployment-jtzdt\" (UID: 
\"66ab3eff-a1be-4cee-b460-39ac1d384491\") " pod="openstack/ssh-known-hosts-edpm-deployment-jtzdt" Oct 01 15:42:16 crc kubenswrapper[4869]: I1001 15:42:16.370418 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-jtzdt" Oct 01 15:42:16 crc kubenswrapper[4869]: I1001 15:42:16.949207 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-jtzdt"] Oct 01 15:42:16 crc kubenswrapper[4869]: W1001 15:42:16.953952 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod66ab3eff_a1be_4cee_b460_39ac1d384491.slice/crio-f843762cc30711c1e81c564c6e41ca7ce5e966b1de899a1d856cf5b49641a6ba WatchSource:0}: Error finding container f843762cc30711c1e81c564c6e41ca7ce5e966b1de899a1d856cf5b49641a6ba: Status 404 returned error can't find the container with id f843762cc30711c1e81c564c6e41ca7ce5e966b1de899a1d856cf5b49641a6ba Oct 01 15:42:17 crc kubenswrapper[4869]: I1001 15:42:17.947166 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-jtzdt" event={"ID":"66ab3eff-a1be-4cee-b460-39ac1d384491","Type":"ContainerStarted","Data":"dc7020f5cb70c71f2c5f892913e6e132e8cbad7f4ee8b89d978b767f3d8ad63c"} Oct 01 15:42:17 crc kubenswrapper[4869]: I1001 15:42:17.948820 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-jtzdt" event={"ID":"66ab3eff-a1be-4cee-b460-39ac1d384491","Type":"ContainerStarted","Data":"f843762cc30711c1e81c564c6e41ca7ce5e966b1de899a1d856cf5b49641a6ba"} Oct 01 15:42:17 crc kubenswrapper[4869]: I1001 15:42:17.963815 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ssh-known-hosts-edpm-deployment-jtzdt" podStartSLOduration=1.473356085 podStartE2EDuration="1.963794577s" podCreationTimestamp="2025-10-01 15:42:16 +0000 UTC" firstStartedPulling="2025-10-01 15:42:16.965987165 +0000 UTC m=+2246.112830271" lastFinishedPulling="2025-10-01 15:42:17.456425647 +0000 UTC m=+2246.603268763" observedRunningTime="2025-10-01 15:42:17.963661033 +0000 UTC m=+2247.110504149" watchObservedRunningTime="2025-10-01 15:42:17.963794577 +0000 UTC m=+2247.110637703" Oct 01 15:42:23 crc kubenswrapper[4869]: I1001 15:42:23.581883 4869 scope.go:117] "RemoveContainer" containerID="850fa9657d55ab61ed48a042dba1eb165240f62a294e8f87d32c9a3206247a54" Oct 01 15:42:23 crc kubenswrapper[4869]: E1001 15:42:23.582741 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 15:42:28 crc kubenswrapper[4869]: I1001 15:42:28.065086 4869 generic.go:334] "Generic (PLEG): container finished" podID="66ab3eff-a1be-4cee-b460-39ac1d384491" containerID="dc7020f5cb70c71f2c5f892913e6e132e8cbad7f4ee8b89d978b767f3d8ad63c" exitCode=0 Oct 01 15:42:28 crc kubenswrapper[4869]: I1001 15:42:28.065202 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-jtzdt" event={"ID":"66ab3eff-a1be-4cee-b460-39ac1d384491","Type":"ContainerDied","Data":"dc7020f5cb70c71f2c5f892913e6e132e8cbad7f4ee8b89d978b767f3d8ad63c"} Oct 01 15:42:29 
crc kubenswrapper[4869]: I1001 15:42:29.544108 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-jtzdt" Oct 01 15:42:29 crc kubenswrapper[4869]: I1001 15:42:29.663432 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t887r\" (UniqueName: \"kubernetes.io/projected/66ab3eff-a1be-4cee-b460-39ac1d384491-kube-api-access-t887r\") pod \"66ab3eff-a1be-4cee-b460-39ac1d384491\" (UID: \"66ab3eff-a1be-4cee-b460-39ac1d384491\") " Oct 01 15:42:29 crc kubenswrapper[4869]: I1001 15:42:29.663580 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/66ab3eff-a1be-4cee-b460-39ac1d384491-ceph\") pod \"66ab3eff-a1be-4cee-b460-39ac1d384491\" (UID: \"66ab3eff-a1be-4cee-b460-39ac1d384491\") " Oct 01 15:42:29 crc kubenswrapper[4869]: I1001 15:42:29.663605 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/66ab3eff-a1be-4cee-b460-39ac1d384491-inventory-0\") pod \"66ab3eff-a1be-4cee-b460-39ac1d384491\" (UID: \"66ab3eff-a1be-4cee-b460-39ac1d384491\") " Oct 01 15:42:29 crc kubenswrapper[4869]: I1001 15:42:29.663627 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/66ab3eff-a1be-4cee-b460-39ac1d384491-ssh-key-openstack-edpm-ipam\") pod \"66ab3eff-a1be-4cee-b460-39ac1d384491\" (UID: \"66ab3eff-a1be-4cee-b460-39ac1d384491\") " Oct 01 15:42:29 crc kubenswrapper[4869]: I1001 15:42:29.669499 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/66ab3eff-a1be-4cee-b460-39ac1d384491-ceph" (OuterVolumeSpecName: "ceph") pod "66ab3eff-a1be-4cee-b460-39ac1d384491" (UID: "66ab3eff-a1be-4cee-b460-39ac1d384491"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:42:29 crc kubenswrapper[4869]: I1001 15:42:29.670753 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/66ab3eff-a1be-4cee-b460-39ac1d384491-kube-api-access-t887r" (OuterVolumeSpecName: "kube-api-access-t887r") pod "66ab3eff-a1be-4cee-b460-39ac1d384491" (UID: "66ab3eff-a1be-4cee-b460-39ac1d384491"). InnerVolumeSpecName "kube-api-access-t887r". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:42:29 crc kubenswrapper[4869]: I1001 15:42:29.695324 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/66ab3eff-a1be-4cee-b460-39ac1d384491-inventory-0" (OuterVolumeSpecName: "inventory-0") pod "66ab3eff-a1be-4cee-b460-39ac1d384491" (UID: "66ab3eff-a1be-4cee-b460-39ac1d384491"). InnerVolumeSpecName "inventory-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:42:29 crc kubenswrapper[4869]: I1001 15:42:29.698078 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/66ab3eff-a1be-4cee-b460-39ac1d384491-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "66ab3eff-a1be-4cee-b460-39ac1d384491" (UID: "66ab3eff-a1be-4cee-b460-39ac1d384491"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:42:29 crc kubenswrapper[4869]: I1001 15:42:29.766092 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t887r\" (UniqueName: \"kubernetes.io/projected/66ab3eff-a1be-4cee-b460-39ac1d384491-kube-api-access-t887r\") on node \"crc\" DevicePath \"\"" Oct 01 15:42:29 crc kubenswrapper[4869]: I1001 15:42:29.766143 4869 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/66ab3eff-a1be-4cee-b460-39ac1d384491-ceph\") on node \"crc\" DevicePath \"\"" Oct 01 15:42:29 crc kubenswrapper[4869]: I1001 15:42:29.766165 4869 reconciler_common.go:293] "Volume detached for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/66ab3eff-a1be-4cee-b460-39ac1d384491-inventory-0\") on node \"crc\" DevicePath \"\"" Oct 01 15:42:29 crc kubenswrapper[4869]: I1001 15:42:29.766184 4869 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/66ab3eff-a1be-4cee-b460-39ac1d384491-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Oct 01 15:42:30 crc kubenswrapper[4869]: I1001 15:42:30.090673 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-jtzdt" event={"ID":"66ab3eff-a1be-4cee-b460-39ac1d384491","Type":"ContainerDied","Data":"f843762cc30711c1e81c564c6e41ca7ce5e966b1de899a1d856cf5b49641a6ba"} Oct 01 15:42:30 crc kubenswrapper[4869]: I1001 15:42:30.091028 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f843762cc30711c1e81c564c6e41ca7ce5e966b1de899a1d856cf5b49641a6ba" Oct 01 15:42:30 crc kubenswrapper[4869]: I1001 15:42:30.090781 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-jtzdt" Oct 01 15:42:30 crc kubenswrapper[4869]: I1001 15:42:30.162519 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-gv92z"] Oct 01 15:42:30 crc kubenswrapper[4869]: E1001 15:42:30.162884 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66ab3eff-a1be-4cee-b460-39ac1d384491" containerName="ssh-known-hosts-edpm-deployment" Oct 01 15:42:30 crc kubenswrapper[4869]: I1001 15:42:30.162901 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="66ab3eff-a1be-4cee-b460-39ac1d384491" containerName="ssh-known-hosts-edpm-deployment" Oct 01 15:42:30 crc kubenswrapper[4869]: I1001 15:42:30.163073 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="66ab3eff-a1be-4cee-b460-39ac1d384491" containerName="ssh-known-hosts-edpm-deployment" Oct 01 15:42:30 crc kubenswrapper[4869]: I1001 15:42:30.163645 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-gv92z" Oct 01 15:42:30 crc kubenswrapper[4869]: I1001 15:42:30.165556 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 01 15:42:30 crc kubenswrapper[4869]: I1001 15:42:30.166353 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Oct 01 15:42:30 crc kubenswrapper[4869]: I1001 15:42:30.166497 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 01 15:42:30 crc kubenswrapper[4869]: I1001 15:42:30.167082 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 01 15:42:30 crc kubenswrapper[4869]: I1001 15:42:30.168295 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-cjg8g" Oct 01 15:42:30 crc kubenswrapper[4869]: I1001 15:42:30.214513 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-gv92z"] Oct 01 15:42:30 crc kubenswrapper[4869]: I1001 15:42:30.276810 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cfp6p\" (UniqueName: \"kubernetes.io/projected/b61ff6db-ea0f-458d-a0cf-8f65e610a90e-kube-api-access-cfp6p\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-gv92z\" (UID: \"b61ff6db-ea0f-458d-a0cf-8f65e610a90e\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-gv92z" Oct 01 15:42:30 crc kubenswrapper[4869]: I1001 15:42:30.276893 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b61ff6db-ea0f-458d-a0cf-8f65e610a90e-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-gv92z\" (UID: \"b61ff6db-ea0f-458d-a0cf-8f65e610a90e\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-gv92z" Oct 01 15:42:30 crc kubenswrapper[4869]: I1001 15:42:30.277073 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/b61ff6db-ea0f-458d-a0cf-8f65e610a90e-ceph\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-gv92z\" (UID: \"b61ff6db-ea0f-458d-a0cf-8f65e610a90e\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-gv92z" Oct 01 15:42:30 crc kubenswrapper[4869]: I1001 15:42:30.277132 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b61ff6db-ea0f-458d-a0cf-8f65e610a90e-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-gv92z\" (UID: \"b61ff6db-ea0f-458d-a0cf-8f65e610a90e\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-gv92z" Oct 01 15:42:30 crc kubenswrapper[4869]: I1001 15:42:30.378561 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/b61ff6db-ea0f-458d-a0cf-8f65e610a90e-ceph\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-gv92z\" (UID: \"b61ff6db-ea0f-458d-a0cf-8f65e610a90e\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-gv92z" Oct 01 15:42:30 crc kubenswrapper[4869]: I1001 15:42:30.378651 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: 
\"kubernetes.io/secret/b61ff6db-ea0f-458d-a0cf-8f65e610a90e-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-gv92z\" (UID: \"b61ff6db-ea0f-458d-a0cf-8f65e610a90e\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-gv92z" Oct 01 15:42:30 crc kubenswrapper[4869]: I1001 15:42:30.378688 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cfp6p\" (UniqueName: \"kubernetes.io/projected/b61ff6db-ea0f-458d-a0cf-8f65e610a90e-kube-api-access-cfp6p\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-gv92z\" (UID: \"b61ff6db-ea0f-458d-a0cf-8f65e610a90e\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-gv92z" Oct 01 15:42:30 crc kubenswrapper[4869]: I1001 15:42:30.378725 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b61ff6db-ea0f-458d-a0cf-8f65e610a90e-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-gv92z\" (UID: \"b61ff6db-ea0f-458d-a0cf-8f65e610a90e\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-gv92z" Oct 01 15:42:30 crc kubenswrapper[4869]: I1001 15:42:30.384979 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b61ff6db-ea0f-458d-a0cf-8f65e610a90e-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-gv92z\" (UID: \"b61ff6db-ea0f-458d-a0cf-8f65e610a90e\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-gv92z" Oct 01 15:42:30 crc kubenswrapper[4869]: I1001 15:42:30.387134 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/b61ff6db-ea0f-458d-a0cf-8f65e610a90e-ceph\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-gv92z\" (UID: \"b61ff6db-ea0f-458d-a0cf-8f65e610a90e\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-gv92z" Oct 01 15:42:30 crc kubenswrapper[4869]: I1001 15:42:30.392720 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b61ff6db-ea0f-458d-a0cf-8f65e610a90e-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-gv92z\" (UID: \"b61ff6db-ea0f-458d-a0cf-8f65e610a90e\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-gv92z" Oct 01 15:42:30 crc kubenswrapper[4869]: I1001 15:42:30.402789 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cfp6p\" (UniqueName: \"kubernetes.io/projected/b61ff6db-ea0f-458d-a0cf-8f65e610a90e-kube-api-access-cfp6p\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-gv92z\" (UID: \"b61ff6db-ea0f-458d-a0cf-8f65e610a90e\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-gv92z" Oct 01 15:42:30 crc kubenswrapper[4869]: I1001 15:42:30.485916 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-gv92z" Oct 01 15:42:30 crc kubenswrapper[4869]: I1001 15:42:30.993015 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-gv92z"] Oct 01 15:42:30 crc kubenswrapper[4869]: W1001 15:42:30.999605 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb61ff6db_ea0f_458d_a0cf_8f65e610a90e.slice/crio-0c2b4de961ad994a1c4b72c4fb42c99b4c0adcb712cd38bf9e1b005ee42d5f26 WatchSource:0}: Error finding container 0c2b4de961ad994a1c4b72c4fb42c99b4c0adcb712cd38bf9e1b005ee42d5f26: Status 404 returned error can't find the container with id 0c2b4de961ad994a1c4b72c4fb42c99b4c0adcb712cd38bf9e1b005ee42d5f26 Oct 01 15:42:31 crc kubenswrapper[4869]: I1001 15:42:31.101411 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-gv92z" event={"ID":"b61ff6db-ea0f-458d-a0cf-8f65e610a90e","Type":"ContainerStarted","Data":"0c2b4de961ad994a1c4b72c4fb42c99b4c0adcb712cd38bf9e1b005ee42d5f26"} Oct 01 15:42:32 crc kubenswrapper[4869]: I1001 15:42:32.119456 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-gv92z" event={"ID":"b61ff6db-ea0f-458d-a0cf-8f65e610a90e","Type":"ContainerStarted","Data":"3ca17d5cdbd3d8a1b97f647460f51fc84ee587dd7d204b78bf64ec0ec9cf99a5"} Oct 01 15:42:32 crc kubenswrapper[4869]: I1001 15:42:32.150419 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-gv92z" podStartSLOduration=1.426261765 podStartE2EDuration="2.150400153s" podCreationTimestamp="2025-10-01 15:42:30 +0000 UTC" firstStartedPulling="2025-10-01 15:42:31.002203454 +0000 UTC m=+2260.149046580" lastFinishedPulling="2025-10-01 15:42:31.726341852 +0000 UTC m=+2260.873184968" observedRunningTime="2025-10-01 15:42:32.139949859 +0000 UTC m=+2261.286792985" watchObservedRunningTime="2025-10-01 15:42:32.150400153 +0000 UTC m=+2261.297243279" Oct 01 15:42:36 crc kubenswrapper[4869]: I1001 15:42:36.582007 4869 scope.go:117] "RemoveContainer" containerID="850fa9657d55ab61ed48a042dba1eb165240f62a294e8f87d32c9a3206247a54" Oct 01 15:42:36 crc kubenswrapper[4869]: E1001 15:42:36.583137 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 15:42:40 crc kubenswrapper[4869]: I1001 15:42:40.192956 4869 generic.go:334] "Generic (PLEG): container finished" podID="b61ff6db-ea0f-458d-a0cf-8f65e610a90e" containerID="3ca17d5cdbd3d8a1b97f647460f51fc84ee587dd7d204b78bf64ec0ec9cf99a5" exitCode=0 Oct 01 15:42:40 crc kubenswrapper[4869]: I1001 15:42:40.193033 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-gv92z" event={"ID":"b61ff6db-ea0f-458d-a0cf-8f65e610a90e","Type":"ContainerDied","Data":"3ca17d5cdbd3d8a1b97f647460f51fc84ee587dd7d204b78bf64ec0ec9cf99a5"} Oct 01 15:42:41 crc kubenswrapper[4869]: I1001 15:42:41.694997 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-gv92z" Oct 01 15:42:41 crc kubenswrapper[4869]: I1001 15:42:41.889916 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b61ff6db-ea0f-458d-a0cf-8f65e610a90e-inventory\") pod \"b61ff6db-ea0f-458d-a0cf-8f65e610a90e\" (UID: \"b61ff6db-ea0f-458d-a0cf-8f65e610a90e\") " Oct 01 15:42:41 crc kubenswrapper[4869]: I1001 15:42:41.889999 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/b61ff6db-ea0f-458d-a0cf-8f65e610a90e-ceph\") pod \"b61ff6db-ea0f-458d-a0cf-8f65e610a90e\" (UID: \"b61ff6db-ea0f-458d-a0cf-8f65e610a90e\") " Oct 01 15:42:41 crc kubenswrapper[4869]: I1001 15:42:41.890112 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b61ff6db-ea0f-458d-a0cf-8f65e610a90e-ssh-key\") pod \"b61ff6db-ea0f-458d-a0cf-8f65e610a90e\" (UID: \"b61ff6db-ea0f-458d-a0cf-8f65e610a90e\") " Oct 01 15:42:41 crc kubenswrapper[4869]: I1001 15:42:41.890166 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfp6p\" (UniqueName: \"kubernetes.io/projected/b61ff6db-ea0f-458d-a0cf-8f65e610a90e-kube-api-access-cfp6p\") pod \"b61ff6db-ea0f-458d-a0cf-8f65e610a90e\" (UID: \"b61ff6db-ea0f-458d-a0cf-8f65e610a90e\") " Oct 01 15:42:41 crc kubenswrapper[4869]: I1001 15:42:41.896620 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b61ff6db-ea0f-458d-a0cf-8f65e610a90e-kube-api-access-cfp6p" (OuterVolumeSpecName: "kube-api-access-cfp6p") pod "b61ff6db-ea0f-458d-a0cf-8f65e610a90e" (UID: "b61ff6db-ea0f-458d-a0cf-8f65e610a90e"). InnerVolumeSpecName "kube-api-access-cfp6p". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:42:41 crc kubenswrapper[4869]: I1001 15:42:41.896659 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b61ff6db-ea0f-458d-a0cf-8f65e610a90e-ceph" (OuterVolumeSpecName: "ceph") pod "b61ff6db-ea0f-458d-a0cf-8f65e610a90e" (UID: "b61ff6db-ea0f-458d-a0cf-8f65e610a90e"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:42:41 crc kubenswrapper[4869]: I1001 15:42:41.918317 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b61ff6db-ea0f-458d-a0cf-8f65e610a90e-inventory" (OuterVolumeSpecName: "inventory") pod "b61ff6db-ea0f-458d-a0cf-8f65e610a90e" (UID: "b61ff6db-ea0f-458d-a0cf-8f65e610a90e"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:42:41 crc kubenswrapper[4869]: I1001 15:42:41.922203 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b61ff6db-ea0f-458d-a0cf-8f65e610a90e-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "b61ff6db-ea0f-458d-a0cf-8f65e610a90e" (UID: "b61ff6db-ea0f-458d-a0cf-8f65e610a90e"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:42:41 crc kubenswrapper[4869]: I1001 15:42:41.997058 4869 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b61ff6db-ea0f-458d-a0cf-8f65e610a90e-inventory\") on node \"crc\" DevicePath \"\"" Oct 01 15:42:41 crc kubenswrapper[4869]: I1001 15:42:41.997106 4869 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/b61ff6db-ea0f-458d-a0cf-8f65e610a90e-ceph\") on node \"crc\" DevicePath \"\"" Oct 01 15:42:41 crc kubenswrapper[4869]: I1001 15:42:41.997117 4869 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b61ff6db-ea0f-458d-a0cf-8f65e610a90e-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 01 15:42:41 crc kubenswrapper[4869]: I1001 15:42:41.997130 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfp6p\" (UniqueName: \"kubernetes.io/projected/b61ff6db-ea0f-458d-a0cf-8f65e610a90e-kube-api-access-cfp6p\") on node \"crc\" DevicePath \"\"" Oct 01 15:42:42 crc kubenswrapper[4869]: I1001 15:42:42.227807 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-gv92z" event={"ID":"b61ff6db-ea0f-458d-a0cf-8f65e610a90e","Type":"ContainerDied","Data":"0c2b4de961ad994a1c4b72c4fb42c99b4c0adcb712cd38bf9e1b005ee42d5f26"} Oct 01 15:42:42 crc kubenswrapper[4869]: I1001 15:42:42.228145 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0c2b4de961ad994a1c4b72c4fb42c99b4c0adcb712cd38bf9e1b005ee42d5f26" Oct 01 15:42:42 crc kubenswrapper[4869]: I1001 15:42:42.227877 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-gv92z" Oct 01 15:42:42 crc kubenswrapper[4869]: I1001 15:42:42.289958 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-6l9z8"] Oct 01 15:42:42 crc kubenswrapper[4869]: E1001 15:42:42.290336 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b61ff6db-ea0f-458d-a0cf-8f65e610a90e" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Oct 01 15:42:42 crc kubenswrapper[4869]: I1001 15:42:42.290354 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="b61ff6db-ea0f-458d-a0cf-8f65e610a90e" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Oct 01 15:42:42 crc kubenswrapper[4869]: I1001 15:42:42.290525 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="b61ff6db-ea0f-458d-a0cf-8f65e610a90e" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Oct 01 15:42:42 crc kubenswrapper[4869]: I1001 15:42:42.291101 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-6l9z8" Oct 01 15:42:42 crc kubenswrapper[4869]: I1001 15:42:42.293032 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 01 15:42:42 crc kubenswrapper[4869]: I1001 15:42:42.293291 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 01 15:42:42 crc kubenswrapper[4869]: I1001 15:42:42.293456 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 01 15:42:42 crc kubenswrapper[4869]: I1001 15:42:42.294842 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-cjg8g" Oct 01 15:42:42 crc kubenswrapper[4869]: I1001 15:42:42.294858 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Oct 01 15:42:42 crc kubenswrapper[4869]: I1001 15:42:42.302632 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e438986b-3250-42b7-a8d6-910aae80e576-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-6l9z8\" (UID: \"e438986b-3250-42b7-a8d6-910aae80e576\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-6l9z8" Oct 01 15:42:42 crc kubenswrapper[4869]: I1001 15:42:42.302695 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e438986b-3250-42b7-a8d6-910aae80e576-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-6l9z8\" (UID: \"e438986b-3250-42b7-a8d6-910aae80e576\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-6l9z8" Oct 01 15:42:42 crc kubenswrapper[4869]: I1001 15:42:42.302835 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dm77s\" (UniqueName: \"kubernetes.io/projected/e438986b-3250-42b7-a8d6-910aae80e576-kube-api-access-dm77s\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-6l9z8\" (UID: \"e438986b-3250-42b7-a8d6-910aae80e576\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-6l9z8" Oct 01 15:42:42 crc kubenswrapper[4869]: I1001 15:42:42.302897 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/e438986b-3250-42b7-a8d6-910aae80e576-ceph\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-6l9z8\" (UID: \"e438986b-3250-42b7-a8d6-910aae80e576\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-6l9z8" Oct 01 15:42:42 crc kubenswrapper[4869]: I1001 15:42:42.305293 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-6l9z8"] Oct 01 15:42:42 crc kubenswrapper[4869]: I1001 15:42:42.404689 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dm77s\" (UniqueName: \"kubernetes.io/projected/e438986b-3250-42b7-a8d6-910aae80e576-kube-api-access-dm77s\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-6l9z8\" (UID: \"e438986b-3250-42b7-a8d6-910aae80e576\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-6l9z8" Oct 01 15:42:42 crc kubenswrapper[4869]: I1001 15:42:42.404756 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: 
\"kubernetes.io/secret/e438986b-3250-42b7-a8d6-910aae80e576-ceph\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-6l9z8\" (UID: \"e438986b-3250-42b7-a8d6-910aae80e576\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-6l9z8" Oct 01 15:42:42 crc kubenswrapper[4869]: I1001 15:42:42.404874 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e438986b-3250-42b7-a8d6-910aae80e576-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-6l9z8\" (UID: \"e438986b-3250-42b7-a8d6-910aae80e576\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-6l9z8" Oct 01 15:42:42 crc kubenswrapper[4869]: I1001 15:42:42.404902 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e438986b-3250-42b7-a8d6-910aae80e576-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-6l9z8\" (UID: \"e438986b-3250-42b7-a8d6-910aae80e576\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-6l9z8" Oct 01 15:42:42 crc kubenswrapper[4869]: I1001 15:42:42.409562 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e438986b-3250-42b7-a8d6-910aae80e576-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-6l9z8\" (UID: \"e438986b-3250-42b7-a8d6-910aae80e576\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-6l9z8" Oct 01 15:42:42 crc kubenswrapper[4869]: I1001 15:42:42.409631 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/e438986b-3250-42b7-a8d6-910aae80e576-ceph\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-6l9z8\" (UID: \"e438986b-3250-42b7-a8d6-910aae80e576\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-6l9z8" Oct 01 15:42:42 crc kubenswrapper[4869]: I1001 15:42:42.410043 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e438986b-3250-42b7-a8d6-910aae80e576-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-6l9z8\" (UID: \"e438986b-3250-42b7-a8d6-910aae80e576\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-6l9z8" Oct 01 15:42:42 crc kubenswrapper[4869]: I1001 15:42:42.421724 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dm77s\" (UniqueName: \"kubernetes.io/projected/e438986b-3250-42b7-a8d6-910aae80e576-kube-api-access-dm77s\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-6l9z8\" (UID: \"e438986b-3250-42b7-a8d6-910aae80e576\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-6l9z8" Oct 01 15:42:42 crc kubenswrapper[4869]: I1001 15:42:42.608126 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-6l9z8" Oct 01 15:42:43 crc kubenswrapper[4869]: I1001 15:42:43.158045 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-6l9z8"] Oct 01 15:42:43 crc kubenswrapper[4869]: I1001 15:42:43.238003 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-6l9z8" event={"ID":"e438986b-3250-42b7-a8d6-910aae80e576","Type":"ContainerStarted","Data":"052ced0f5aed367b6f18c625ea1d04c5ba0d54d91d6ee47f1cf5f53eb1379214"} Oct 01 15:42:44 crc kubenswrapper[4869]: I1001 15:42:44.248032 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-6l9z8" event={"ID":"e438986b-3250-42b7-a8d6-910aae80e576","Type":"ContainerStarted","Data":"26e8d8a75b89990336b2c9bf5295b171ffcacd94511bab84d5064b7fa2cc8534"} Oct 01 15:42:44 crc kubenswrapper[4869]: I1001 15:42:44.271882 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-6l9z8" podStartSLOduration=1.776795124 podStartE2EDuration="2.271838032s" podCreationTimestamp="2025-10-01 15:42:42 +0000 UTC" firstStartedPulling="2025-10-01 15:42:43.158140702 +0000 UTC m=+2272.304983828" lastFinishedPulling="2025-10-01 15:42:43.65318359 +0000 UTC m=+2272.800026736" observedRunningTime="2025-10-01 15:42:44.263117083 +0000 UTC m=+2273.409960209" watchObservedRunningTime="2025-10-01 15:42:44.271838032 +0000 UTC m=+2273.418681158" Oct 01 15:42:48 crc kubenswrapper[4869]: I1001 15:42:48.581670 4869 scope.go:117] "RemoveContainer" containerID="850fa9657d55ab61ed48a042dba1eb165240f62a294e8f87d32c9a3206247a54" Oct 01 15:42:48 crc kubenswrapper[4869]: E1001 15:42:48.583086 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 15:42:54 crc kubenswrapper[4869]: I1001 15:42:54.358327 4869 generic.go:334] "Generic (PLEG): container finished" podID="e438986b-3250-42b7-a8d6-910aae80e576" containerID="26e8d8a75b89990336b2c9bf5295b171ffcacd94511bab84d5064b7fa2cc8534" exitCode=0 Oct 01 15:42:54 crc kubenswrapper[4869]: I1001 15:42:54.358400 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-6l9z8" event={"ID":"e438986b-3250-42b7-a8d6-910aae80e576","Type":"ContainerDied","Data":"26e8d8a75b89990336b2c9bf5295b171ffcacd94511bab84d5064b7fa2cc8534"} Oct 01 15:42:55 crc kubenswrapper[4869]: I1001 15:42:55.796915 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-6l9z8" Oct 01 15:42:55 crc kubenswrapper[4869]: I1001 15:42:55.898781 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e438986b-3250-42b7-a8d6-910aae80e576-inventory\") pod \"e438986b-3250-42b7-a8d6-910aae80e576\" (UID: \"e438986b-3250-42b7-a8d6-910aae80e576\") " Oct 01 15:42:55 crc kubenswrapper[4869]: I1001 15:42:55.898895 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/e438986b-3250-42b7-a8d6-910aae80e576-ceph\") pod \"e438986b-3250-42b7-a8d6-910aae80e576\" (UID: \"e438986b-3250-42b7-a8d6-910aae80e576\") " Oct 01 15:42:55 crc kubenswrapper[4869]: I1001 15:42:55.898989 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e438986b-3250-42b7-a8d6-910aae80e576-ssh-key\") pod \"e438986b-3250-42b7-a8d6-910aae80e576\" (UID: \"e438986b-3250-42b7-a8d6-910aae80e576\") " Oct 01 15:42:55 crc kubenswrapper[4869]: I1001 15:42:55.899058 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dm77s\" (UniqueName: \"kubernetes.io/projected/e438986b-3250-42b7-a8d6-910aae80e576-kube-api-access-dm77s\") pod \"e438986b-3250-42b7-a8d6-910aae80e576\" (UID: \"e438986b-3250-42b7-a8d6-910aae80e576\") " Oct 01 15:42:55 crc kubenswrapper[4869]: I1001 15:42:55.906424 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e438986b-3250-42b7-a8d6-910aae80e576-ceph" (OuterVolumeSpecName: "ceph") pod "e438986b-3250-42b7-a8d6-910aae80e576" (UID: "e438986b-3250-42b7-a8d6-910aae80e576"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:42:55 crc kubenswrapper[4869]: I1001 15:42:55.906444 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e438986b-3250-42b7-a8d6-910aae80e576-kube-api-access-dm77s" (OuterVolumeSpecName: "kube-api-access-dm77s") pod "e438986b-3250-42b7-a8d6-910aae80e576" (UID: "e438986b-3250-42b7-a8d6-910aae80e576"). InnerVolumeSpecName "kube-api-access-dm77s". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:42:55 crc kubenswrapper[4869]: I1001 15:42:55.923908 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e438986b-3250-42b7-a8d6-910aae80e576-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "e438986b-3250-42b7-a8d6-910aae80e576" (UID: "e438986b-3250-42b7-a8d6-910aae80e576"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:42:55 crc kubenswrapper[4869]: I1001 15:42:55.924369 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e438986b-3250-42b7-a8d6-910aae80e576-inventory" (OuterVolumeSpecName: "inventory") pod "e438986b-3250-42b7-a8d6-910aae80e576" (UID: "e438986b-3250-42b7-a8d6-910aae80e576"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:42:56 crc kubenswrapper[4869]: I1001 15:42:56.002055 4869 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e438986b-3250-42b7-a8d6-910aae80e576-inventory\") on node \"crc\" DevicePath \"\"" Oct 01 15:42:56 crc kubenswrapper[4869]: I1001 15:42:56.002442 4869 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/e438986b-3250-42b7-a8d6-910aae80e576-ceph\") on node \"crc\" DevicePath \"\"" Oct 01 15:42:56 crc kubenswrapper[4869]: I1001 15:42:56.002537 4869 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e438986b-3250-42b7-a8d6-910aae80e576-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 01 15:42:56 crc kubenswrapper[4869]: I1001 15:42:56.002620 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dm77s\" (UniqueName: \"kubernetes.io/projected/e438986b-3250-42b7-a8d6-910aae80e576-kube-api-access-dm77s\") on node \"crc\" DevicePath \"\"" Oct 01 15:42:56 crc kubenswrapper[4869]: I1001 15:42:56.382373 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-6l9z8" event={"ID":"e438986b-3250-42b7-a8d6-910aae80e576","Type":"ContainerDied","Data":"052ced0f5aed367b6f18c625ea1d04c5ba0d54d91d6ee47f1cf5f53eb1379214"} Oct 01 15:42:56 crc kubenswrapper[4869]: I1001 15:42:56.382628 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="052ced0f5aed367b6f18c625ea1d04c5ba0d54d91d6ee47f1cf5f53eb1379214" Oct 01 15:42:56 crc kubenswrapper[4869]: I1001 15:42:56.382508 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-6l9z8" Oct 01 15:42:56 crc kubenswrapper[4869]: I1001 15:42:56.470494 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-d9zw8"] Oct 01 15:42:56 crc kubenswrapper[4869]: E1001 15:42:56.470957 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e438986b-3250-42b7-a8d6-910aae80e576" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Oct 01 15:42:56 crc kubenswrapper[4869]: I1001 15:42:56.470981 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="e438986b-3250-42b7-a8d6-910aae80e576" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Oct 01 15:42:56 crc kubenswrapper[4869]: I1001 15:42:56.471224 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="e438986b-3250-42b7-a8d6-910aae80e576" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Oct 01 15:42:56 crc kubenswrapper[4869]: I1001 15:42:56.471967 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-d9zw8" Oct 01 15:42:56 crc kubenswrapper[4869]: I1001 15:42:56.474600 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 01 15:42:56 crc kubenswrapper[4869]: I1001 15:42:56.474804 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-libvirt-default-certs-0" Oct 01 15:42:56 crc kubenswrapper[4869]: I1001 15:42:56.474807 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 01 15:42:56 crc kubenswrapper[4869]: I1001 15:42:56.474601 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-cjg8g" Oct 01 15:42:56 crc kubenswrapper[4869]: I1001 15:42:56.474991 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Oct 01 15:42:56 crc kubenswrapper[4869]: I1001 15:42:56.475077 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-ovn-default-certs-0" Oct 01 15:42:56 crc kubenswrapper[4869]: I1001 15:42:56.475109 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-neutron-metadata-default-certs-0" Oct 01 15:42:56 crc kubenswrapper[4869]: I1001 15:42:56.475279 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 01 15:42:56 crc kubenswrapper[4869]: I1001 15:42:56.487160 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-d9zw8"] Oct 01 15:42:56 crc kubenswrapper[4869]: I1001 15:42:56.612623 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ae422061-7694-44f9-a6ea-d6ce97da502d-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-d9zw8\" (UID: \"ae422061-7694-44f9-a6ea-d6ce97da502d\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-d9zw8" Oct 01 15:42:56 crc kubenswrapper[4869]: I1001 15:42:56.612683 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ae422061-7694-44f9-a6ea-d6ce97da502d-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-d9zw8\" (UID: \"ae422061-7694-44f9-a6ea-d6ce97da502d\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-d9zw8" Oct 01 15:42:56 crc kubenswrapper[4869]: I1001 15:42:56.612720 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae422061-7694-44f9-a6ea-d6ce97da502d-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-d9zw8\" (UID: \"ae422061-7694-44f9-a6ea-d6ce97da502d\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-d9zw8" Oct 01 15:42:56 crc kubenswrapper[4869]: I1001 15:42:56.613407 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/ae422061-7694-44f9-a6ea-d6ce97da502d-ceph\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-d9zw8\" (UID: \"ae422061-7694-44f9-a6ea-d6ce97da502d\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-d9zw8" Oct 01 
15:42:56 crc kubenswrapper[4869]: I1001 15:42:56.613520 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae422061-7694-44f9-a6ea-d6ce97da502d-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-d9zw8\" (UID: \"ae422061-7694-44f9-a6ea-d6ce97da502d\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-d9zw8" Oct 01 15:42:56 crc kubenswrapper[4869]: I1001 15:42:56.613660 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ae422061-7694-44f9-a6ea-d6ce97da502d-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-d9zw8\" (UID: \"ae422061-7694-44f9-a6ea-d6ce97da502d\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-d9zw8" Oct 01 15:42:56 crc kubenswrapper[4869]: I1001 15:42:56.613816 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8r67t\" (UniqueName: \"kubernetes.io/projected/ae422061-7694-44f9-a6ea-d6ce97da502d-kube-api-access-8r67t\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-d9zw8\" (UID: \"ae422061-7694-44f9-a6ea-d6ce97da502d\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-d9zw8" Oct 01 15:42:56 crc kubenswrapper[4869]: I1001 15:42:56.614164 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae422061-7694-44f9-a6ea-d6ce97da502d-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-d9zw8\" (UID: \"ae422061-7694-44f9-a6ea-d6ce97da502d\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-d9zw8" Oct 01 15:42:56 crc kubenswrapper[4869]: I1001 15:42:56.614508 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae422061-7694-44f9-a6ea-d6ce97da502d-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-d9zw8\" (UID: \"ae422061-7694-44f9-a6ea-d6ce97da502d\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-d9zw8" Oct 01 15:42:56 crc kubenswrapper[4869]: I1001 15:42:56.614631 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ae422061-7694-44f9-a6ea-d6ce97da502d-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-d9zw8\" (UID: \"ae422061-7694-44f9-a6ea-d6ce97da502d\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-d9zw8" Oct 01 15:42:56 crc kubenswrapper[4869]: I1001 15:42:56.614693 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ae422061-7694-44f9-a6ea-d6ce97da502d-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-d9zw8\" (UID: \"ae422061-7694-44f9-a6ea-d6ce97da502d\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-d9zw8" Oct 01 15:42:56 crc kubenswrapper[4869]: I1001 15:42:56.614792 
4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae422061-7694-44f9-a6ea-d6ce97da502d-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-d9zw8\" (UID: \"ae422061-7694-44f9-a6ea-d6ce97da502d\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-d9zw8" Oct 01 15:42:56 crc kubenswrapper[4869]: I1001 15:42:56.614848 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae422061-7694-44f9-a6ea-d6ce97da502d-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-d9zw8\" (UID: \"ae422061-7694-44f9-a6ea-d6ce97da502d\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-d9zw8" Oct 01 15:42:56 crc kubenswrapper[4869]: I1001 15:42:56.716419 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/ae422061-7694-44f9-a6ea-d6ce97da502d-ceph\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-d9zw8\" (UID: \"ae422061-7694-44f9-a6ea-d6ce97da502d\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-d9zw8" Oct 01 15:42:56 crc kubenswrapper[4869]: I1001 15:42:56.716546 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae422061-7694-44f9-a6ea-d6ce97da502d-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-d9zw8\" (UID: \"ae422061-7694-44f9-a6ea-d6ce97da502d\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-d9zw8" Oct 01 15:42:56 crc kubenswrapper[4869]: I1001 15:42:56.716687 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ae422061-7694-44f9-a6ea-d6ce97da502d-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-d9zw8\" (UID: \"ae422061-7694-44f9-a6ea-d6ce97da502d\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-d9zw8" Oct 01 15:42:56 crc kubenswrapper[4869]: I1001 15:42:56.716747 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8r67t\" (UniqueName: \"kubernetes.io/projected/ae422061-7694-44f9-a6ea-d6ce97da502d-kube-api-access-8r67t\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-d9zw8\" (UID: \"ae422061-7694-44f9-a6ea-d6ce97da502d\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-d9zw8" Oct 01 15:42:56 crc kubenswrapper[4869]: I1001 15:42:56.716785 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae422061-7694-44f9-a6ea-d6ce97da502d-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-d9zw8\" (UID: \"ae422061-7694-44f9-a6ea-d6ce97da502d\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-d9zw8" Oct 01 15:42:56 crc kubenswrapper[4869]: I1001 15:42:56.716831 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae422061-7694-44f9-a6ea-d6ce97da502d-repo-setup-combined-ca-bundle\") pod 
\"install-certs-edpm-deployment-openstack-edpm-ipam-d9zw8\" (UID: \"ae422061-7694-44f9-a6ea-d6ce97da502d\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-d9zw8" Oct 01 15:42:56 crc kubenswrapper[4869]: I1001 15:42:56.716889 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ae422061-7694-44f9-a6ea-d6ce97da502d-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-d9zw8\" (UID: \"ae422061-7694-44f9-a6ea-d6ce97da502d\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-d9zw8" Oct 01 15:42:56 crc kubenswrapper[4869]: I1001 15:42:56.716939 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ae422061-7694-44f9-a6ea-d6ce97da502d-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-d9zw8\" (UID: \"ae422061-7694-44f9-a6ea-d6ce97da502d\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-d9zw8" Oct 01 15:42:56 crc kubenswrapper[4869]: I1001 15:42:56.716983 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae422061-7694-44f9-a6ea-d6ce97da502d-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-d9zw8\" (UID: \"ae422061-7694-44f9-a6ea-d6ce97da502d\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-d9zw8" Oct 01 15:42:56 crc kubenswrapper[4869]: I1001 15:42:56.717038 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae422061-7694-44f9-a6ea-d6ce97da502d-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-d9zw8\" (UID: \"ae422061-7694-44f9-a6ea-d6ce97da502d\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-d9zw8" Oct 01 15:42:56 crc kubenswrapper[4869]: I1001 15:42:56.717149 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ae422061-7694-44f9-a6ea-d6ce97da502d-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-d9zw8\" (UID: \"ae422061-7694-44f9-a6ea-d6ce97da502d\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-d9zw8" Oct 01 15:42:56 crc kubenswrapper[4869]: I1001 15:42:56.717187 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ae422061-7694-44f9-a6ea-d6ce97da502d-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-d9zw8\" (UID: \"ae422061-7694-44f9-a6ea-d6ce97da502d\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-d9zw8" Oct 01 15:42:56 crc kubenswrapper[4869]: I1001 15:42:56.717218 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae422061-7694-44f9-a6ea-d6ce97da502d-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-d9zw8\" (UID: \"ae422061-7694-44f9-a6ea-d6ce97da502d\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-d9zw8" Oct 01 15:42:56 crc kubenswrapper[4869]: I1001 15:42:56.722129 4869 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ae422061-7694-44f9-a6ea-d6ce97da502d-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-d9zw8\" (UID: \"ae422061-7694-44f9-a6ea-d6ce97da502d\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-d9zw8" Oct 01 15:42:56 crc kubenswrapper[4869]: I1001 15:42:56.722905 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ae422061-7694-44f9-a6ea-d6ce97da502d-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-d9zw8\" (UID: \"ae422061-7694-44f9-a6ea-d6ce97da502d\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-d9zw8" Oct 01 15:42:56 crc kubenswrapper[4869]: I1001 15:42:56.724077 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ae422061-7694-44f9-a6ea-d6ce97da502d-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-d9zw8\" (UID: \"ae422061-7694-44f9-a6ea-d6ce97da502d\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-d9zw8" Oct 01 15:42:56 crc kubenswrapper[4869]: I1001 15:42:56.724732 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae422061-7694-44f9-a6ea-d6ce97da502d-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-d9zw8\" (UID: \"ae422061-7694-44f9-a6ea-d6ce97da502d\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-d9zw8" Oct 01 15:42:56 crc kubenswrapper[4869]: I1001 15:42:56.725169 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae422061-7694-44f9-a6ea-d6ce97da502d-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-d9zw8\" (UID: \"ae422061-7694-44f9-a6ea-d6ce97da502d\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-d9zw8" Oct 01 15:42:56 crc kubenswrapper[4869]: I1001 15:42:56.726509 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/ae422061-7694-44f9-a6ea-d6ce97da502d-ceph\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-d9zw8\" (UID: \"ae422061-7694-44f9-a6ea-d6ce97da502d\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-d9zw8" Oct 01 15:42:56 crc kubenswrapper[4869]: I1001 15:42:56.726552 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae422061-7694-44f9-a6ea-d6ce97da502d-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-d9zw8\" (UID: \"ae422061-7694-44f9-a6ea-d6ce97da502d\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-d9zw8" Oct 01 15:42:56 crc kubenswrapper[4869]: I1001 15:42:56.727451 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae422061-7694-44f9-a6ea-d6ce97da502d-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-d9zw8\" (UID: \"ae422061-7694-44f9-a6ea-d6ce97da502d\") " 
pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-d9zw8" Oct 01 15:42:56 crc kubenswrapper[4869]: I1001 15:42:56.727524 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae422061-7694-44f9-a6ea-d6ce97da502d-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-d9zw8\" (UID: \"ae422061-7694-44f9-a6ea-d6ce97da502d\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-d9zw8" Oct 01 15:42:56 crc kubenswrapper[4869]: I1001 15:42:56.727983 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ae422061-7694-44f9-a6ea-d6ce97da502d-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-d9zw8\" (UID: \"ae422061-7694-44f9-a6ea-d6ce97da502d\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-d9zw8" Oct 01 15:42:56 crc kubenswrapper[4869]: I1001 15:42:56.729323 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae422061-7694-44f9-a6ea-d6ce97da502d-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-d9zw8\" (UID: \"ae422061-7694-44f9-a6ea-d6ce97da502d\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-d9zw8" Oct 01 15:42:56 crc kubenswrapper[4869]: I1001 15:42:56.736300 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ae422061-7694-44f9-a6ea-d6ce97da502d-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-d9zw8\" (UID: \"ae422061-7694-44f9-a6ea-d6ce97da502d\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-d9zw8" Oct 01 15:42:56 crc kubenswrapper[4869]: I1001 15:42:56.741487 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8r67t\" (UniqueName: \"kubernetes.io/projected/ae422061-7694-44f9-a6ea-d6ce97da502d-kube-api-access-8r67t\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-d9zw8\" (UID: \"ae422061-7694-44f9-a6ea-d6ce97da502d\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-d9zw8" Oct 01 15:42:56 crc kubenswrapper[4869]: I1001 15:42:56.794337 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-d9zw8" Oct 01 15:42:57 crc kubenswrapper[4869]: I1001 15:42:57.405034 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-d9zw8"] Oct 01 15:42:57 crc kubenswrapper[4869]: I1001 15:42:57.416899 4869 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 01 15:42:58 crc kubenswrapper[4869]: I1001 15:42:58.415744 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-d9zw8" event={"ID":"ae422061-7694-44f9-a6ea-d6ce97da502d","Type":"ContainerStarted","Data":"5d40af18b81ebccd22918df9f8e2275d2d66fd4f10bf06308b97d7963a2ec05d"} Oct 01 15:42:59 crc kubenswrapper[4869]: I1001 15:42:59.427246 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-d9zw8" event={"ID":"ae422061-7694-44f9-a6ea-d6ce97da502d","Type":"ContainerStarted","Data":"66e5a21eaa145f000c90ff9a0cce71bdecaf787d92e48c334df28bcbb25c21f9"} Oct 01 15:42:59 crc kubenswrapper[4869]: I1001 15:42:59.452158 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-d9zw8" podStartSLOduration=2.398889049 podStartE2EDuration="3.452126137s" podCreationTimestamp="2025-10-01 15:42:56 +0000 UTC" firstStartedPulling="2025-10-01 15:42:57.416495305 +0000 UTC m=+2286.563338441" lastFinishedPulling="2025-10-01 15:42:58.469732363 +0000 UTC m=+2287.616575529" observedRunningTime="2025-10-01 15:42:59.446780683 +0000 UTC m=+2288.593623879" watchObservedRunningTime="2025-10-01 15:42:59.452126137 +0000 UTC m=+2288.598969283" Oct 01 15:42:59 crc kubenswrapper[4869]: I1001 15:42:59.591744 4869 scope.go:117] "RemoveContainer" containerID="850fa9657d55ab61ed48a042dba1eb165240f62a294e8f87d32c9a3206247a54" Oct 01 15:42:59 crc kubenswrapper[4869]: E1001 15:42:59.592221 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 15:43:14 crc kubenswrapper[4869]: I1001 15:43:14.581901 4869 scope.go:117] "RemoveContainer" containerID="850fa9657d55ab61ed48a042dba1eb165240f62a294e8f87d32c9a3206247a54" Oct 01 15:43:14 crc kubenswrapper[4869]: E1001 15:43:14.582916 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 15:43:15 crc kubenswrapper[4869]: I1001 15:43:15.521285 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-j7c5q"] Oct 01 15:43:15 crc kubenswrapper[4869]: I1001 15:43:15.523570 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-j7c5q" Oct 01 15:43:15 crc kubenswrapper[4869]: I1001 15:43:15.545376 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-j7c5q"] Oct 01 15:43:15 crc kubenswrapper[4869]: I1001 15:43:15.695016 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6wvvt\" (UniqueName: \"kubernetes.io/projected/ce90130d-e4b0-42cd-bbc0-12a161b6dec1-kube-api-access-6wvvt\") pod \"redhat-operators-j7c5q\" (UID: \"ce90130d-e4b0-42cd-bbc0-12a161b6dec1\") " pod="openshift-marketplace/redhat-operators-j7c5q" Oct 01 15:43:15 crc kubenswrapper[4869]: I1001 15:43:15.695135 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ce90130d-e4b0-42cd-bbc0-12a161b6dec1-catalog-content\") pod \"redhat-operators-j7c5q\" (UID: \"ce90130d-e4b0-42cd-bbc0-12a161b6dec1\") " pod="openshift-marketplace/redhat-operators-j7c5q" Oct 01 15:43:15 crc kubenswrapper[4869]: I1001 15:43:15.695464 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ce90130d-e4b0-42cd-bbc0-12a161b6dec1-utilities\") pod \"redhat-operators-j7c5q\" (UID: \"ce90130d-e4b0-42cd-bbc0-12a161b6dec1\") " pod="openshift-marketplace/redhat-operators-j7c5q" Oct 01 15:43:15 crc kubenswrapper[4869]: I1001 15:43:15.796998 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ce90130d-e4b0-42cd-bbc0-12a161b6dec1-catalog-content\") pod \"redhat-operators-j7c5q\" (UID: \"ce90130d-e4b0-42cd-bbc0-12a161b6dec1\") " pod="openshift-marketplace/redhat-operators-j7c5q" Oct 01 15:43:15 crc kubenswrapper[4869]: I1001 15:43:15.797216 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ce90130d-e4b0-42cd-bbc0-12a161b6dec1-utilities\") pod \"redhat-operators-j7c5q\" (UID: \"ce90130d-e4b0-42cd-bbc0-12a161b6dec1\") " pod="openshift-marketplace/redhat-operators-j7c5q" Oct 01 15:43:15 crc kubenswrapper[4869]: I1001 15:43:15.797392 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6wvvt\" (UniqueName: \"kubernetes.io/projected/ce90130d-e4b0-42cd-bbc0-12a161b6dec1-kube-api-access-6wvvt\") pod \"redhat-operators-j7c5q\" (UID: \"ce90130d-e4b0-42cd-bbc0-12a161b6dec1\") " pod="openshift-marketplace/redhat-operators-j7c5q" Oct 01 15:43:15 crc kubenswrapper[4869]: I1001 15:43:15.797564 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ce90130d-e4b0-42cd-bbc0-12a161b6dec1-catalog-content\") pod \"redhat-operators-j7c5q\" (UID: \"ce90130d-e4b0-42cd-bbc0-12a161b6dec1\") " pod="openshift-marketplace/redhat-operators-j7c5q" Oct 01 15:43:15 crc kubenswrapper[4869]: I1001 15:43:15.797736 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ce90130d-e4b0-42cd-bbc0-12a161b6dec1-utilities\") pod \"redhat-operators-j7c5q\" (UID: \"ce90130d-e4b0-42cd-bbc0-12a161b6dec1\") " pod="openshift-marketplace/redhat-operators-j7c5q" Oct 01 15:43:15 crc kubenswrapper[4869]: I1001 15:43:15.823932 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-6wvvt\" (UniqueName: \"kubernetes.io/projected/ce90130d-e4b0-42cd-bbc0-12a161b6dec1-kube-api-access-6wvvt\") pod \"redhat-operators-j7c5q\" (UID: \"ce90130d-e4b0-42cd-bbc0-12a161b6dec1\") " pod="openshift-marketplace/redhat-operators-j7c5q" Oct 01 15:43:15 crc kubenswrapper[4869]: I1001 15:43:15.847905 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-j7c5q" Oct 01 15:43:16 crc kubenswrapper[4869]: I1001 15:43:16.319089 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-j7c5q"] Oct 01 15:43:16 crc kubenswrapper[4869]: I1001 15:43:16.619293 4869 generic.go:334] "Generic (PLEG): container finished" podID="ce90130d-e4b0-42cd-bbc0-12a161b6dec1" containerID="ec24266605b970c5e7f12fb5e5ef4bd4e27158d860de37ab45dd4c57081b1ba0" exitCode=0 Oct 01 15:43:16 crc kubenswrapper[4869]: I1001 15:43:16.619339 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-j7c5q" event={"ID":"ce90130d-e4b0-42cd-bbc0-12a161b6dec1","Type":"ContainerDied","Data":"ec24266605b970c5e7f12fb5e5ef4bd4e27158d860de37ab45dd4c57081b1ba0"} Oct 01 15:43:16 crc kubenswrapper[4869]: I1001 15:43:16.619363 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-j7c5q" event={"ID":"ce90130d-e4b0-42cd-bbc0-12a161b6dec1","Type":"ContainerStarted","Data":"0900cd9d5602a8eb97266d206c623122e7be6a9fd52949be849fca3bd6c1280c"} Oct 01 15:43:21 crc kubenswrapper[4869]: I1001 15:43:21.681492 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-j7c5q" event={"ID":"ce90130d-e4b0-42cd-bbc0-12a161b6dec1","Type":"ContainerStarted","Data":"b5e42215fc6e29bf19eb3820874711d8db25f71287b4fa992ecaa36391e969f3"} Oct 01 15:43:22 crc kubenswrapper[4869]: I1001 15:43:22.693749 4869 generic.go:334] "Generic (PLEG): container finished" podID="ce90130d-e4b0-42cd-bbc0-12a161b6dec1" containerID="b5e42215fc6e29bf19eb3820874711d8db25f71287b4fa992ecaa36391e969f3" exitCode=0 Oct 01 15:43:22 crc kubenswrapper[4869]: I1001 15:43:22.693795 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-j7c5q" event={"ID":"ce90130d-e4b0-42cd-bbc0-12a161b6dec1","Type":"ContainerDied","Data":"b5e42215fc6e29bf19eb3820874711d8db25f71287b4fa992ecaa36391e969f3"} Oct 01 15:43:23 crc kubenswrapper[4869]: I1001 15:43:23.703191 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-j7c5q" event={"ID":"ce90130d-e4b0-42cd-bbc0-12a161b6dec1","Type":"ContainerStarted","Data":"1ccc91f4108c5902856954a303dbba99cde15ff7137d2a3243808476c2672b7d"} Oct 01 15:43:23 crc kubenswrapper[4869]: I1001 15:43:23.725617 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-j7c5q" podStartSLOduration=2.039163842 podStartE2EDuration="8.725594601s" podCreationTimestamp="2025-10-01 15:43:15 +0000 UTC" firstStartedPulling="2025-10-01 15:43:16.621188784 +0000 UTC m=+2305.768031900" lastFinishedPulling="2025-10-01 15:43:23.307619533 +0000 UTC m=+2312.454462659" observedRunningTime="2025-10-01 15:43:23.720449381 +0000 UTC m=+2312.867292507" watchObservedRunningTime="2025-10-01 15:43:23.725594601 +0000 UTC m=+2312.872437717" Oct 01 15:43:25 crc kubenswrapper[4869]: I1001 15:43:25.582503 4869 scope.go:117] "RemoveContainer" containerID="850fa9657d55ab61ed48a042dba1eb165240f62a294e8f87d32c9a3206247a54" Oct 01 
15:43:25 crc kubenswrapper[4869]: E1001 15:43:25.582918 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 15:43:25 crc kubenswrapper[4869]: I1001 15:43:25.848101 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-j7c5q" Oct 01 15:43:25 crc kubenswrapper[4869]: I1001 15:43:25.848165 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-j7c5q" Oct 01 15:43:26 crc kubenswrapper[4869]: I1001 15:43:26.896686 4869 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-j7c5q" podUID="ce90130d-e4b0-42cd-bbc0-12a161b6dec1" containerName="registry-server" probeResult="failure" output=< Oct 01 15:43:26 crc kubenswrapper[4869]: timeout: failed to connect service ":50051" within 1s Oct 01 15:43:26 crc kubenswrapper[4869]: > Oct 01 15:43:30 crc kubenswrapper[4869]: I1001 15:43:30.769363 4869 generic.go:334] "Generic (PLEG): container finished" podID="ae422061-7694-44f9-a6ea-d6ce97da502d" containerID="66e5a21eaa145f000c90ff9a0cce71bdecaf787d92e48c334df28bcbb25c21f9" exitCode=0 Oct 01 15:43:30 crc kubenswrapper[4869]: I1001 15:43:30.769430 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-d9zw8" event={"ID":"ae422061-7694-44f9-a6ea-d6ce97da502d","Type":"ContainerDied","Data":"66e5a21eaa145f000c90ff9a0cce71bdecaf787d92e48c334df28bcbb25c21f9"} Oct 01 15:43:32 crc kubenswrapper[4869]: I1001 15:43:32.227787 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-d9zw8" Oct 01 15:43:32 crc kubenswrapper[4869]: I1001 15:43:32.348809 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae422061-7694-44f9-a6ea-d6ce97da502d-bootstrap-combined-ca-bundle\") pod \"ae422061-7694-44f9-a6ea-d6ce97da502d\" (UID: \"ae422061-7694-44f9-a6ea-d6ce97da502d\") " Oct 01 15:43:32 crc kubenswrapper[4869]: I1001 15:43:32.348974 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae422061-7694-44f9-a6ea-d6ce97da502d-neutron-metadata-combined-ca-bundle\") pod \"ae422061-7694-44f9-a6ea-d6ce97da502d\" (UID: \"ae422061-7694-44f9-a6ea-d6ce97da502d\") " Oct 01 15:43:32 crc kubenswrapper[4869]: I1001 15:43:32.349027 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/ae422061-7694-44f9-a6ea-d6ce97da502d-ceph\") pod \"ae422061-7694-44f9-a6ea-d6ce97da502d\" (UID: \"ae422061-7694-44f9-a6ea-d6ce97da502d\") " Oct 01 15:43:32 crc kubenswrapper[4869]: I1001 15:43:32.349092 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae422061-7694-44f9-a6ea-d6ce97da502d-nova-combined-ca-bundle\") pod \"ae422061-7694-44f9-a6ea-d6ce97da502d\" (UID: \"ae422061-7694-44f9-a6ea-d6ce97da502d\") " Oct 01 15:43:32 crc kubenswrapper[4869]: I1001 15:43:32.349123 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae422061-7694-44f9-a6ea-d6ce97da502d-ovn-combined-ca-bundle\") pod \"ae422061-7694-44f9-a6ea-d6ce97da502d\" (UID: \"ae422061-7694-44f9-a6ea-d6ce97da502d\") " Oct 01 15:43:32 crc kubenswrapper[4869]: I1001 15:43:32.349194 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ae422061-7694-44f9-a6ea-d6ce97da502d-inventory\") pod \"ae422061-7694-44f9-a6ea-d6ce97da502d\" (UID: \"ae422061-7694-44f9-a6ea-d6ce97da502d\") " Oct 01 15:43:32 crc kubenswrapper[4869]: I1001 15:43:32.349230 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ae422061-7694-44f9-a6ea-d6ce97da502d-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"ae422061-7694-44f9-a6ea-d6ce97da502d\" (UID: \"ae422061-7694-44f9-a6ea-d6ce97da502d\") " Oct 01 15:43:32 crc kubenswrapper[4869]: I1001 15:43:32.349311 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ae422061-7694-44f9-a6ea-d6ce97da502d-ssh-key\") pod \"ae422061-7694-44f9-a6ea-d6ce97da502d\" (UID: \"ae422061-7694-44f9-a6ea-d6ce97da502d\") " Oct 01 15:43:32 crc kubenswrapper[4869]: I1001 15:43:32.349368 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ae422061-7694-44f9-a6ea-d6ce97da502d-openstack-edpm-ipam-ovn-default-certs-0\") pod \"ae422061-7694-44f9-a6ea-d6ce97da502d\" (UID: \"ae422061-7694-44f9-a6ea-d6ce97da502d\") " Oct 01 15:43:32 crc kubenswrapper[4869]: I1001 15:43:32.349404 4869 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ae422061-7694-44f9-a6ea-d6ce97da502d-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"ae422061-7694-44f9-a6ea-d6ce97da502d\" (UID: \"ae422061-7694-44f9-a6ea-d6ce97da502d\") " Oct 01 15:43:32 crc kubenswrapper[4869]: I1001 15:43:32.349463 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8r67t\" (UniqueName: \"kubernetes.io/projected/ae422061-7694-44f9-a6ea-d6ce97da502d-kube-api-access-8r67t\") pod \"ae422061-7694-44f9-a6ea-d6ce97da502d\" (UID: \"ae422061-7694-44f9-a6ea-d6ce97da502d\") " Oct 01 15:43:32 crc kubenswrapper[4869]: I1001 15:43:32.349492 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae422061-7694-44f9-a6ea-d6ce97da502d-repo-setup-combined-ca-bundle\") pod \"ae422061-7694-44f9-a6ea-d6ce97da502d\" (UID: \"ae422061-7694-44f9-a6ea-d6ce97da502d\") " Oct 01 15:43:32 crc kubenswrapper[4869]: I1001 15:43:32.349571 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae422061-7694-44f9-a6ea-d6ce97da502d-libvirt-combined-ca-bundle\") pod \"ae422061-7694-44f9-a6ea-d6ce97da502d\" (UID: \"ae422061-7694-44f9-a6ea-d6ce97da502d\") " Oct 01 15:43:32 crc kubenswrapper[4869]: I1001 15:43:32.355790 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ae422061-7694-44f9-a6ea-d6ce97da502d-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "ae422061-7694-44f9-a6ea-d6ce97da502d" (UID: "ae422061-7694-44f9-a6ea-d6ce97da502d"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:43:32 crc kubenswrapper[4869]: I1001 15:43:32.357037 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ae422061-7694-44f9-a6ea-d6ce97da502d-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "ae422061-7694-44f9-a6ea-d6ce97da502d" (UID: "ae422061-7694-44f9-a6ea-d6ce97da502d"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:43:32 crc kubenswrapper[4869]: I1001 15:43:32.357075 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ae422061-7694-44f9-a6ea-d6ce97da502d-openstack-edpm-ipam-neutron-metadata-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-neutron-metadata-default-certs-0") pod "ae422061-7694-44f9-a6ea-d6ce97da502d" (UID: "ae422061-7694-44f9-a6ea-d6ce97da502d"). InnerVolumeSpecName "openstack-edpm-ipam-neutron-metadata-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:43:32 crc kubenswrapper[4869]: I1001 15:43:32.357056 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ae422061-7694-44f9-a6ea-d6ce97da502d-kube-api-access-8r67t" (OuterVolumeSpecName: "kube-api-access-8r67t") pod "ae422061-7694-44f9-a6ea-d6ce97da502d" (UID: "ae422061-7694-44f9-a6ea-d6ce97da502d"). InnerVolumeSpecName "kube-api-access-8r67t". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:43:32 crc kubenswrapper[4869]: I1001 15:43:32.357174 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ae422061-7694-44f9-a6ea-d6ce97da502d-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "ae422061-7694-44f9-a6ea-d6ce97da502d" (UID: "ae422061-7694-44f9-a6ea-d6ce97da502d"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:43:32 crc kubenswrapper[4869]: I1001 15:43:32.357324 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ae422061-7694-44f9-a6ea-d6ce97da502d-ceph" (OuterVolumeSpecName: "ceph") pod "ae422061-7694-44f9-a6ea-d6ce97da502d" (UID: "ae422061-7694-44f9-a6ea-d6ce97da502d"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:43:32 crc kubenswrapper[4869]: I1001 15:43:32.357887 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ae422061-7694-44f9-a6ea-d6ce97da502d-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "ae422061-7694-44f9-a6ea-d6ce97da502d" (UID: "ae422061-7694-44f9-a6ea-d6ce97da502d"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:43:32 crc kubenswrapper[4869]: I1001 15:43:32.358882 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ae422061-7694-44f9-a6ea-d6ce97da502d-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "ae422061-7694-44f9-a6ea-d6ce97da502d" (UID: "ae422061-7694-44f9-a6ea-d6ce97da502d"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:43:32 crc kubenswrapper[4869]: I1001 15:43:32.359069 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ae422061-7694-44f9-a6ea-d6ce97da502d-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "ae422061-7694-44f9-a6ea-d6ce97da502d" (UID: "ae422061-7694-44f9-a6ea-d6ce97da502d"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:43:32 crc kubenswrapper[4869]: I1001 15:43:32.364970 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ae422061-7694-44f9-a6ea-d6ce97da502d-openstack-edpm-ipam-ovn-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-ovn-default-certs-0") pod "ae422061-7694-44f9-a6ea-d6ce97da502d" (UID: "ae422061-7694-44f9-a6ea-d6ce97da502d"). InnerVolumeSpecName "openstack-edpm-ipam-ovn-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:43:32 crc kubenswrapper[4869]: I1001 15:43:32.368151 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ae422061-7694-44f9-a6ea-d6ce97da502d-openstack-edpm-ipam-libvirt-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-libvirt-default-certs-0") pod "ae422061-7694-44f9-a6ea-d6ce97da502d" (UID: "ae422061-7694-44f9-a6ea-d6ce97da502d"). InnerVolumeSpecName "openstack-edpm-ipam-libvirt-default-certs-0". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:43:32 crc kubenswrapper[4869]: I1001 15:43:32.388191 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ae422061-7694-44f9-a6ea-d6ce97da502d-inventory" (OuterVolumeSpecName: "inventory") pod "ae422061-7694-44f9-a6ea-d6ce97da502d" (UID: "ae422061-7694-44f9-a6ea-d6ce97da502d"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:43:32 crc kubenswrapper[4869]: I1001 15:43:32.390439 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ae422061-7694-44f9-a6ea-d6ce97da502d-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "ae422061-7694-44f9-a6ea-d6ce97da502d" (UID: "ae422061-7694-44f9-a6ea-d6ce97da502d"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:43:32 crc kubenswrapper[4869]: I1001 15:43:32.452712 4869 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ae422061-7694-44f9-a6ea-d6ce97da502d-inventory\") on node \"crc\" DevicePath \"\"" Oct 01 15:43:32 crc kubenswrapper[4869]: I1001 15:43:32.452766 4869 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ae422061-7694-44f9-a6ea-d6ce97da502d-openstack-edpm-ipam-neutron-metadata-default-certs-0\") on node \"crc\" DevicePath \"\"" Oct 01 15:43:32 crc kubenswrapper[4869]: I1001 15:43:32.452789 4869 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ae422061-7694-44f9-a6ea-d6ce97da502d-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 01 15:43:32 crc kubenswrapper[4869]: I1001 15:43:32.452808 4869 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ae422061-7694-44f9-a6ea-d6ce97da502d-openstack-edpm-ipam-ovn-default-certs-0\") on node \"crc\" DevicePath \"\"" Oct 01 15:43:32 crc kubenswrapper[4869]: I1001 15:43:32.452828 4869 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ae422061-7694-44f9-a6ea-d6ce97da502d-openstack-edpm-ipam-libvirt-default-certs-0\") on node \"crc\" DevicePath \"\"" Oct 01 15:43:32 crc kubenswrapper[4869]: I1001 15:43:32.452849 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8r67t\" (UniqueName: \"kubernetes.io/projected/ae422061-7694-44f9-a6ea-d6ce97da502d-kube-api-access-8r67t\") on node \"crc\" DevicePath \"\"" Oct 01 15:43:32 crc kubenswrapper[4869]: I1001 15:43:32.452867 4869 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae422061-7694-44f9-a6ea-d6ce97da502d-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 15:43:32 crc kubenswrapper[4869]: I1001 15:43:32.452886 4869 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae422061-7694-44f9-a6ea-d6ce97da502d-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 15:43:32 crc kubenswrapper[4869]: I1001 15:43:32.452904 4869 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae422061-7694-44f9-a6ea-d6ce97da502d-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 
15:43:32 crc kubenswrapper[4869]: I1001 15:43:32.452921 4869 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae422061-7694-44f9-a6ea-d6ce97da502d-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 15:43:32 crc kubenswrapper[4869]: I1001 15:43:32.452939 4869 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/ae422061-7694-44f9-a6ea-d6ce97da502d-ceph\") on node \"crc\" DevicePath \"\"" Oct 01 15:43:32 crc kubenswrapper[4869]: I1001 15:43:32.452957 4869 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae422061-7694-44f9-a6ea-d6ce97da502d-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 15:43:32 crc kubenswrapper[4869]: I1001 15:43:32.452977 4869 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae422061-7694-44f9-a6ea-d6ce97da502d-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 15:43:32 crc kubenswrapper[4869]: I1001 15:43:32.796624 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-d9zw8" event={"ID":"ae422061-7694-44f9-a6ea-d6ce97da502d","Type":"ContainerDied","Data":"5d40af18b81ebccd22918df9f8e2275d2d66fd4f10bf06308b97d7963a2ec05d"} Oct 01 15:43:32 crc kubenswrapper[4869]: I1001 15:43:32.797404 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5d40af18b81ebccd22918df9f8e2275d2d66fd4f10bf06308b97d7963a2ec05d" Oct 01 15:43:32 crc kubenswrapper[4869]: I1001 15:43:32.796722 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-d9zw8" Oct 01 15:43:33 crc kubenswrapper[4869]: I1001 15:43:33.012540 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-4gtgk"] Oct 01 15:43:33 crc kubenswrapper[4869]: E1001 15:43:33.012870 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ae422061-7694-44f9-a6ea-d6ce97da502d" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Oct 01 15:43:33 crc kubenswrapper[4869]: I1001 15:43:33.012886 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="ae422061-7694-44f9-a6ea-d6ce97da502d" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Oct 01 15:43:33 crc kubenswrapper[4869]: I1001 15:43:33.013081 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="ae422061-7694-44f9-a6ea-d6ce97da502d" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Oct 01 15:43:33 crc kubenswrapper[4869]: I1001 15:43:33.013662 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-4gtgk" Oct 01 15:43:33 crc kubenswrapper[4869]: I1001 15:43:33.016681 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Oct 01 15:43:33 crc kubenswrapper[4869]: I1001 15:43:33.016705 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 01 15:43:33 crc kubenswrapper[4869]: I1001 15:43:33.017102 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 01 15:43:33 crc kubenswrapper[4869]: I1001 15:43:33.017186 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 01 15:43:33 crc kubenswrapper[4869]: I1001 15:43:33.017646 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-cjg8g" Oct 01 15:43:33 crc kubenswrapper[4869]: I1001 15:43:33.040935 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-4gtgk"] Oct 01 15:43:33 crc kubenswrapper[4869]: I1001 15:43:33.165414 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6852e50e-d598-467e-8588-1aba32529660-ssh-key\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-4gtgk\" (UID: \"6852e50e-d598-467e-8588-1aba32529660\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-4gtgk" Oct 01 15:43:33 crc kubenswrapper[4869]: I1001 15:43:33.165776 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6852e50e-d598-467e-8588-1aba32529660-inventory\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-4gtgk\" (UID: \"6852e50e-d598-467e-8588-1aba32529660\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-4gtgk" Oct 01 15:43:33 crc kubenswrapper[4869]: I1001 15:43:33.165908 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-klv2c\" (UniqueName: \"kubernetes.io/projected/6852e50e-d598-467e-8588-1aba32529660-kube-api-access-klv2c\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-4gtgk\" (UID: \"6852e50e-d598-467e-8588-1aba32529660\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-4gtgk" Oct 01 15:43:33 crc kubenswrapper[4869]: I1001 15:43:33.165943 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/6852e50e-d598-467e-8588-1aba32529660-ceph\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-4gtgk\" (UID: \"6852e50e-d598-467e-8588-1aba32529660\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-4gtgk" Oct 01 15:43:33 crc kubenswrapper[4869]: I1001 15:43:33.267897 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6852e50e-d598-467e-8588-1aba32529660-inventory\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-4gtgk\" (UID: \"6852e50e-d598-467e-8588-1aba32529660\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-4gtgk" Oct 01 15:43:33 crc kubenswrapper[4869]: I1001 15:43:33.267971 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-klv2c\" 
(UniqueName: \"kubernetes.io/projected/6852e50e-d598-467e-8588-1aba32529660-kube-api-access-klv2c\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-4gtgk\" (UID: \"6852e50e-d598-467e-8588-1aba32529660\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-4gtgk" Oct 01 15:43:33 crc kubenswrapper[4869]: I1001 15:43:33.267997 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/6852e50e-d598-467e-8588-1aba32529660-ceph\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-4gtgk\" (UID: \"6852e50e-d598-467e-8588-1aba32529660\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-4gtgk" Oct 01 15:43:33 crc kubenswrapper[4869]: I1001 15:43:33.268076 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6852e50e-d598-467e-8588-1aba32529660-ssh-key\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-4gtgk\" (UID: \"6852e50e-d598-467e-8588-1aba32529660\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-4gtgk" Oct 01 15:43:33 crc kubenswrapper[4869]: I1001 15:43:33.273058 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6852e50e-d598-467e-8588-1aba32529660-inventory\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-4gtgk\" (UID: \"6852e50e-d598-467e-8588-1aba32529660\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-4gtgk" Oct 01 15:43:33 crc kubenswrapper[4869]: I1001 15:43:33.274598 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/6852e50e-d598-467e-8588-1aba32529660-ceph\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-4gtgk\" (UID: \"6852e50e-d598-467e-8588-1aba32529660\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-4gtgk" Oct 01 15:43:33 crc kubenswrapper[4869]: I1001 15:43:33.276739 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6852e50e-d598-467e-8588-1aba32529660-ssh-key\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-4gtgk\" (UID: \"6852e50e-d598-467e-8588-1aba32529660\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-4gtgk" Oct 01 15:43:33 crc kubenswrapper[4869]: I1001 15:43:33.285553 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-klv2c\" (UniqueName: \"kubernetes.io/projected/6852e50e-d598-467e-8588-1aba32529660-kube-api-access-klv2c\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-4gtgk\" (UID: \"6852e50e-d598-467e-8588-1aba32529660\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-4gtgk" Oct 01 15:43:33 crc kubenswrapper[4869]: I1001 15:43:33.342832 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-4gtgk" Oct 01 15:43:33 crc kubenswrapper[4869]: I1001 15:43:33.944455 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-4gtgk"] Oct 01 15:43:33 crc kubenswrapper[4869]: W1001 15:43:33.948883 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6852e50e_d598_467e_8588_1aba32529660.slice/crio-a99b959b3d4e8700af99f2648da922ea7b44094b19b2d39fb02b1e727ca20328 WatchSource:0}: Error finding container a99b959b3d4e8700af99f2648da922ea7b44094b19b2d39fb02b1e727ca20328: Status 404 returned error can't find the container with id a99b959b3d4e8700af99f2648da922ea7b44094b19b2d39fb02b1e727ca20328 Oct 01 15:43:34 crc kubenswrapper[4869]: I1001 15:43:34.818459 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-4gtgk" event={"ID":"6852e50e-d598-467e-8588-1aba32529660","Type":"ContainerStarted","Data":"a99b959b3d4e8700af99f2648da922ea7b44094b19b2d39fb02b1e727ca20328"} Oct 01 15:43:35 crc kubenswrapper[4869]: I1001 15:43:35.830962 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-4gtgk" event={"ID":"6852e50e-d598-467e-8588-1aba32529660","Type":"ContainerStarted","Data":"b445b8edd260542ac6b33b4c32648fb83593820a0c847caa05f54c3da2b12d7e"} Oct 01 15:43:35 crc kubenswrapper[4869]: I1001 15:43:35.857884 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-4gtgk" podStartSLOduration=3.058593022 podStartE2EDuration="3.857865883s" podCreationTimestamp="2025-10-01 15:43:32 +0000 UTC" firstStartedPulling="2025-10-01 15:43:33.952429841 +0000 UTC m=+2323.099272967" lastFinishedPulling="2025-10-01 15:43:34.751702712 +0000 UTC m=+2323.898545828" observedRunningTime="2025-10-01 15:43:35.853861542 +0000 UTC m=+2325.000704688" watchObservedRunningTime="2025-10-01 15:43:35.857865883 +0000 UTC m=+2325.004709009" Oct 01 15:43:35 crc kubenswrapper[4869]: I1001 15:43:35.934791 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-j7c5q" Oct 01 15:43:35 crc kubenswrapper[4869]: I1001 15:43:35.984793 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-j7c5q" Oct 01 15:43:36 crc kubenswrapper[4869]: I1001 15:43:36.175924 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-j7c5q"] Oct 01 15:43:37 crc kubenswrapper[4869]: I1001 15:43:37.848285 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-j7c5q" podUID="ce90130d-e4b0-42cd-bbc0-12a161b6dec1" containerName="registry-server" containerID="cri-o://1ccc91f4108c5902856954a303dbba99cde15ff7137d2a3243808476c2672b7d" gracePeriod=2 Oct 01 15:43:38 crc kubenswrapper[4869]: I1001 15:43:38.319754 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-j7c5q" Oct 01 15:43:38 crc kubenswrapper[4869]: I1001 15:43:38.374748 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ce90130d-e4b0-42cd-bbc0-12a161b6dec1-catalog-content\") pod \"ce90130d-e4b0-42cd-bbc0-12a161b6dec1\" (UID: \"ce90130d-e4b0-42cd-bbc0-12a161b6dec1\") " Oct 01 15:43:38 crc kubenswrapper[4869]: I1001 15:43:38.374859 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6wvvt\" (UniqueName: \"kubernetes.io/projected/ce90130d-e4b0-42cd-bbc0-12a161b6dec1-kube-api-access-6wvvt\") pod \"ce90130d-e4b0-42cd-bbc0-12a161b6dec1\" (UID: \"ce90130d-e4b0-42cd-bbc0-12a161b6dec1\") " Oct 01 15:43:38 crc kubenswrapper[4869]: I1001 15:43:38.374942 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ce90130d-e4b0-42cd-bbc0-12a161b6dec1-utilities\") pod \"ce90130d-e4b0-42cd-bbc0-12a161b6dec1\" (UID: \"ce90130d-e4b0-42cd-bbc0-12a161b6dec1\") " Oct 01 15:43:38 crc kubenswrapper[4869]: I1001 15:43:38.376095 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ce90130d-e4b0-42cd-bbc0-12a161b6dec1-utilities" (OuterVolumeSpecName: "utilities") pod "ce90130d-e4b0-42cd-bbc0-12a161b6dec1" (UID: "ce90130d-e4b0-42cd-bbc0-12a161b6dec1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 15:43:38 crc kubenswrapper[4869]: I1001 15:43:38.381576 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ce90130d-e4b0-42cd-bbc0-12a161b6dec1-kube-api-access-6wvvt" (OuterVolumeSpecName: "kube-api-access-6wvvt") pod "ce90130d-e4b0-42cd-bbc0-12a161b6dec1" (UID: "ce90130d-e4b0-42cd-bbc0-12a161b6dec1"). InnerVolumeSpecName "kube-api-access-6wvvt". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:43:38 crc kubenswrapper[4869]: I1001 15:43:38.475602 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ce90130d-e4b0-42cd-bbc0-12a161b6dec1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ce90130d-e4b0-42cd-bbc0-12a161b6dec1" (UID: "ce90130d-e4b0-42cd-bbc0-12a161b6dec1"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 15:43:38 crc kubenswrapper[4869]: I1001 15:43:38.477206 4869 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ce90130d-e4b0-42cd-bbc0-12a161b6dec1-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 15:43:38 crc kubenswrapper[4869]: I1001 15:43:38.477245 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6wvvt\" (UniqueName: \"kubernetes.io/projected/ce90130d-e4b0-42cd-bbc0-12a161b6dec1-kube-api-access-6wvvt\") on node \"crc\" DevicePath \"\"" Oct 01 15:43:38 crc kubenswrapper[4869]: I1001 15:43:38.477292 4869 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ce90130d-e4b0-42cd-bbc0-12a161b6dec1-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 15:43:38 crc kubenswrapper[4869]: I1001 15:43:38.580860 4869 scope.go:117] "RemoveContainer" containerID="850fa9657d55ab61ed48a042dba1eb165240f62a294e8f87d32c9a3206247a54" Oct 01 15:43:38 crc kubenswrapper[4869]: E1001 15:43:38.581344 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 15:43:38 crc kubenswrapper[4869]: I1001 15:43:38.861671 4869 generic.go:334] "Generic (PLEG): container finished" podID="ce90130d-e4b0-42cd-bbc0-12a161b6dec1" containerID="1ccc91f4108c5902856954a303dbba99cde15ff7137d2a3243808476c2672b7d" exitCode=0 Oct 01 15:43:38 crc kubenswrapper[4869]: I1001 15:43:38.861730 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-j7c5q" event={"ID":"ce90130d-e4b0-42cd-bbc0-12a161b6dec1","Type":"ContainerDied","Data":"1ccc91f4108c5902856954a303dbba99cde15ff7137d2a3243808476c2672b7d"} Oct 01 15:43:38 crc kubenswrapper[4869]: I1001 15:43:38.861766 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-j7c5q" event={"ID":"ce90130d-e4b0-42cd-bbc0-12a161b6dec1","Type":"ContainerDied","Data":"0900cd9d5602a8eb97266d206c623122e7be6a9fd52949be849fca3bd6c1280c"} Oct 01 15:43:38 crc kubenswrapper[4869]: I1001 15:43:38.861779 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-j7c5q" Oct 01 15:43:38 crc kubenswrapper[4869]: I1001 15:43:38.861791 4869 scope.go:117] "RemoveContainer" containerID="1ccc91f4108c5902856954a303dbba99cde15ff7137d2a3243808476c2672b7d" Oct 01 15:43:38 crc kubenswrapper[4869]: I1001 15:43:38.894725 4869 scope.go:117] "RemoveContainer" containerID="b5e42215fc6e29bf19eb3820874711d8db25f71287b4fa992ecaa36391e969f3" Oct 01 15:43:38 crc kubenswrapper[4869]: I1001 15:43:38.914778 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-j7c5q"] Oct 01 15:43:38 crc kubenswrapper[4869]: I1001 15:43:38.928732 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-j7c5q"] Oct 01 15:43:38 crc kubenswrapper[4869]: I1001 15:43:38.937519 4869 scope.go:117] "RemoveContainer" containerID="ec24266605b970c5e7f12fb5e5ef4bd4e27158d860de37ab45dd4c57081b1ba0" Oct 01 15:43:38 crc kubenswrapper[4869]: I1001 15:43:38.975696 4869 scope.go:117] "RemoveContainer" containerID="1ccc91f4108c5902856954a303dbba99cde15ff7137d2a3243808476c2672b7d" Oct 01 15:43:38 crc kubenswrapper[4869]: E1001 15:43:38.976424 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1ccc91f4108c5902856954a303dbba99cde15ff7137d2a3243808476c2672b7d\": container with ID starting with 1ccc91f4108c5902856954a303dbba99cde15ff7137d2a3243808476c2672b7d not found: ID does not exist" containerID="1ccc91f4108c5902856954a303dbba99cde15ff7137d2a3243808476c2672b7d" Oct 01 15:43:38 crc kubenswrapper[4869]: I1001 15:43:38.976468 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1ccc91f4108c5902856954a303dbba99cde15ff7137d2a3243808476c2672b7d"} err="failed to get container status \"1ccc91f4108c5902856954a303dbba99cde15ff7137d2a3243808476c2672b7d\": rpc error: code = NotFound desc = could not find container \"1ccc91f4108c5902856954a303dbba99cde15ff7137d2a3243808476c2672b7d\": container with ID starting with 1ccc91f4108c5902856954a303dbba99cde15ff7137d2a3243808476c2672b7d not found: ID does not exist" Oct 01 15:43:38 crc kubenswrapper[4869]: I1001 15:43:38.976497 4869 scope.go:117] "RemoveContainer" containerID="b5e42215fc6e29bf19eb3820874711d8db25f71287b4fa992ecaa36391e969f3" Oct 01 15:43:38 crc kubenswrapper[4869]: E1001 15:43:38.976852 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b5e42215fc6e29bf19eb3820874711d8db25f71287b4fa992ecaa36391e969f3\": container with ID starting with b5e42215fc6e29bf19eb3820874711d8db25f71287b4fa992ecaa36391e969f3 not found: ID does not exist" containerID="b5e42215fc6e29bf19eb3820874711d8db25f71287b4fa992ecaa36391e969f3" Oct 01 15:43:38 crc kubenswrapper[4869]: I1001 15:43:38.976873 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b5e42215fc6e29bf19eb3820874711d8db25f71287b4fa992ecaa36391e969f3"} err="failed to get container status \"b5e42215fc6e29bf19eb3820874711d8db25f71287b4fa992ecaa36391e969f3\": rpc error: code = NotFound desc = could not find container \"b5e42215fc6e29bf19eb3820874711d8db25f71287b4fa992ecaa36391e969f3\": container with ID starting with b5e42215fc6e29bf19eb3820874711d8db25f71287b4fa992ecaa36391e969f3 not found: ID does not exist" Oct 01 15:43:38 crc kubenswrapper[4869]: I1001 15:43:38.976888 4869 scope.go:117] "RemoveContainer" 
containerID="ec24266605b970c5e7f12fb5e5ef4bd4e27158d860de37ab45dd4c57081b1ba0" Oct 01 15:43:38 crc kubenswrapper[4869]: E1001 15:43:38.977368 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ec24266605b970c5e7f12fb5e5ef4bd4e27158d860de37ab45dd4c57081b1ba0\": container with ID starting with ec24266605b970c5e7f12fb5e5ef4bd4e27158d860de37ab45dd4c57081b1ba0 not found: ID does not exist" containerID="ec24266605b970c5e7f12fb5e5ef4bd4e27158d860de37ab45dd4c57081b1ba0" Oct 01 15:43:38 crc kubenswrapper[4869]: I1001 15:43:38.977423 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ec24266605b970c5e7f12fb5e5ef4bd4e27158d860de37ab45dd4c57081b1ba0"} err="failed to get container status \"ec24266605b970c5e7f12fb5e5ef4bd4e27158d860de37ab45dd4c57081b1ba0\": rpc error: code = NotFound desc = could not find container \"ec24266605b970c5e7f12fb5e5ef4bd4e27158d860de37ab45dd4c57081b1ba0\": container with ID starting with ec24266605b970c5e7f12fb5e5ef4bd4e27158d860de37ab45dd4c57081b1ba0 not found: ID does not exist" Oct 01 15:43:39 crc kubenswrapper[4869]: I1001 15:43:39.600774 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ce90130d-e4b0-42cd-bbc0-12a161b6dec1" path="/var/lib/kubelet/pods/ce90130d-e4b0-42cd-bbc0-12a161b6dec1/volumes" Oct 01 15:43:40 crc kubenswrapper[4869]: I1001 15:43:40.885899 4869 generic.go:334] "Generic (PLEG): container finished" podID="6852e50e-d598-467e-8588-1aba32529660" containerID="b445b8edd260542ac6b33b4c32648fb83593820a0c847caa05f54c3da2b12d7e" exitCode=0 Oct 01 15:43:40 crc kubenswrapper[4869]: I1001 15:43:40.885947 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-4gtgk" event={"ID":"6852e50e-d598-467e-8588-1aba32529660","Type":"ContainerDied","Data":"b445b8edd260542ac6b33b4c32648fb83593820a0c847caa05f54c3da2b12d7e"} Oct 01 15:43:42 crc kubenswrapper[4869]: I1001 15:43:42.358076 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-4gtgk" Oct 01 15:43:42 crc kubenswrapper[4869]: I1001 15:43:42.363127 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-klv2c\" (UniqueName: \"kubernetes.io/projected/6852e50e-d598-467e-8588-1aba32529660-kube-api-access-klv2c\") pod \"6852e50e-d598-467e-8588-1aba32529660\" (UID: \"6852e50e-d598-467e-8588-1aba32529660\") " Oct 01 15:43:42 crc kubenswrapper[4869]: I1001 15:43:42.363224 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6852e50e-d598-467e-8588-1aba32529660-ssh-key\") pod \"6852e50e-d598-467e-8588-1aba32529660\" (UID: \"6852e50e-d598-467e-8588-1aba32529660\") " Oct 01 15:43:42 crc kubenswrapper[4869]: I1001 15:43:42.363396 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6852e50e-d598-467e-8588-1aba32529660-inventory\") pod \"6852e50e-d598-467e-8588-1aba32529660\" (UID: \"6852e50e-d598-467e-8588-1aba32529660\") " Oct 01 15:43:42 crc kubenswrapper[4869]: I1001 15:43:42.363475 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/6852e50e-d598-467e-8588-1aba32529660-ceph\") pod \"6852e50e-d598-467e-8588-1aba32529660\" (UID: \"6852e50e-d598-467e-8588-1aba32529660\") " Oct 01 15:43:42 crc kubenswrapper[4869]: I1001 15:43:42.370152 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6852e50e-d598-467e-8588-1aba32529660-kube-api-access-klv2c" (OuterVolumeSpecName: "kube-api-access-klv2c") pod "6852e50e-d598-467e-8588-1aba32529660" (UID: "6852e50e-d598-467e-8588-1aba32529660"). InnerVolumeSpecName "kube-api-access-klv2c". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:43:42 crc kubenswrapper[4869]: I1001 15:43:42.370378 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6852e50e-d598-467e-8588-1aba32529660-ceph" (OuterVolumeSpecName: "ceph") pod "6852e50e-d598-467e-8588-1aba32529660" (UID: "6852e50e-d598-467e-8588-1aba32529660"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:43:42 crc kubenswrapper[4869]: I1001 15:43:42.394057 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6852e50e-d598-467e-8588-1aba32529660-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "6852e50e-d598-467e-8588-1aba32529660" (UID: "6852e50e-d598-467e-8588-1aba32529660"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:43:42 crc kubenswrapper[4869]: I1001 15:43:42.425363 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6852e50e-d598-467e-8588-1aba32529660-inventory" (OuterVolumeSpecName: "inventory") pod "6852e50e-d598-467e-8588-1aba32529660" (UID: "6852e50e-d598-467e-8588-1aba32529660"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:43:42 crc kubenswrapper[4869]: I1001 15:43:42.470779 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-klv2c\" (UniqueName: \"kubernetes.io/projected/6852e50e-d598-467e-8588-1aba32529660-kube-api-access-klv2c\") on node \"crc\" DevicePath \"\"" Oct 01 15:43:42 crc kubenswrapper[4869]: I1001 15:43:42.470809 4869 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6852e50e-d598-467e-8588-1aba32529660-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 01 15:43:42 crc kubenswrapper[4869]: I1001 15:43:42.470817 4869 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6852e50e-d598-467e-8588-1aba32529660-inventory\") on node \"crc\" DevicePath \"\"" Oct 01 15:43:42 crc kubenswrapper[4869]: I1001 15:43:42.470827 4869 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/6852e50e-d598-467e-8588-1aba32529660-ceph\") on node \"crc\" DevicePath \"\"" Oct 01 15:43:42 crc kubenswrapper[4869]: I1001 15:43:42.906158 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-4gtgk" event={"ID":"6852e50e-d598-467e-8588-1aba32529660","Type":"ContainerDied","Data":"a99b959b3d4e8700af99f2648da922ea7b44094b19b2d39fb02b1e727ca20328"} Oct 01 15:43:42 crc kubenswrapper[4869]: I1001 15:43:42.906522 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a99b959b3d4e8700af99f2648da922ea7b44094b19b2d39fb02b1e727ca20328" Oct 01 15:43:42 crc kubenswrapper[4869]: I1001 15:43:42.906233 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-4gtgk" Oct 01 15:43:43 crc kubenswrapper[4869]: I1001 15:43:43.001251 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-tx4s2"] Oct 01 15:43:43 crc kubenswrapper[4869]: E1001 15:43:43.001717 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6852e50e-d598-467e-8588-1aba32529660" containerName="ceph-client-edpm-deployment-openstack-edpm-ipam" Oct 01 15:43:43 crc kubenswrapper[4869]: I1001 15:43:43.001740 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="6852e50e-d598-467e-8588-1aba32529660" containerName="ceph-client-edpm-deployment-openstack-edpm-ipam" Oct 01 15:43:43 crc kubenswrapper[4869]: E1001 15:43:43.001762 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ce90130d-e4b0-42cd-bbc0-12a161b6dec1" containerName="registry-server" Oct 01 15:43:43 crc kubenswrapper[4869]: I1001 15:43:43.001770 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="ce90130d-e4b0-42cd-bbc0-12a161b6dec1" containerName="registry-server" Oct 01 15:43:43 crc kubenswrapper[4869]: E1001 15:43:43.001792 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ce90130d-e4b0-42cd-bbc0-12a161b6dec1" containerName="extract-utilities" Oct 01 15:43:43 crc kubenswrapper[4869]: I1001 15:43:43.001800 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="ce90130d-e4b0-42cd-bbc0-12a161b6dec1" containerName="extract-utilities" Oct 01 15:43:43 crc kubenswrapper[4869]: E1001 15:43:43.001825 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ce90130d-e4b0-42cd-bbc0-12a161b6dec1" containerName="extract-content" Oct 01 15:43:43 crc kubenswrapper[4869]: I1001 
15:43:43.001832 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="ce90130d-e4b0-42cd-bbc0-12a161b6dec1" containerName="extract-content" Oct 01 15:43:43 crc kubenswrapper[4869]: I1001 15:43:43.002052 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="6852e50e-d598-467e-8588-1aba32529660" containerName="ceph-client-edpm-deployment-openstack-edpm-ipam" Oct 01 15:43:43 crc kubenswrapper[4869]: I1001 15:43:43.002080 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="ce90130d-e4b0-42cd-bbc0-12a161b6dec1" containerName="registry-server" Oct 01 15:43:43 crc kubenswrapper[4869]: I1001 15:43:43.002815 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-tx4s2" Oct 01 15:43:43 crc kubenswrapper[4869]: I1001 15:43:43.007708 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Oct 01 15:43:43 crc kubenswrapper[4869]: I1001 15:43:43.008806 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-config" Oct 01 15:43:43 crc kubenswrapper[4869]: I1001 15:43:43.008969 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 01 15:43:43 crc kubenswrapper[4869]: I1001 15:43:43.009191 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-cjg8g" Oct 01 15:43:43 crc kubenswrapper[4869]: I1001 15:43:43.010653 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 01 15:43:43 crc kubenswrapper[4869]: I1001 15:43:43.011692 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-tx4s2"] Oct 01 15:43:43 crc kubenswrapper[4869]: I1001 15:43:43.012326 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 01 15:43:43 crc kubenswrapper[4869]: I1001 15:43:43.082327 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/f72fd252-cf04-4a98-831e-0424d9f38724-ceph\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-tx4s2\" (UID: \"f72fd252-cf04-4a98-831e-0424d9f38724\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-tx4s2" Oct 01 15:43:43 crc kubenswrapper[4869]: I1001 15:43:43.082401 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t842k\" (UniqueName: \"kubernetes.io/projected/f72fd252-cf04-4a98-831e-0424d9f38724-kube-api-access-t842k\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-tx4s2\" (UID: \"f72fd252-cf04-4a98-831e-0424d9f38724\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-tx4s2" Oct 01 15:43:43 crc kubenswrapper[4869]: I1001 15:43:43.082431 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f72fd252-cf04-4a98-831e-0424d9f38724-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-tx4s2\" (UID: \"f72fd252-cf04-4a98-831e-0424d9f38724\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-tx4s2" Oct 01 15:43:43 crc kubenswrapper[4869]: I1001 15:43:43.082475 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: 
\"kubernetes.io/secret/f72fd252-cf04-4a98-831e-0424d9f38724-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-tx4s2\" (UID: \"f72fd252-cf04-4a98-831e-0424d9f38724\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-tx4s2" Oct 01 15:43:43 crc kubenswrapper[4869]: I1001 15:43:43.082506 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f72fd252-cf04-4a98-831e-0424d9f38724-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-tx4s2\" (UID: \"f72fd252-cf04-4a98-831e-0424d9f38724\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-tx4s2" Oct 01 15:43:43 crc kubenswrapper[4869]: I1001 15:43:43.082568 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/f72fd252-cf04-4a98-831e-0424d9f38724-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-tx4s2\" (UID: \"f72fd252-cf04-4a98-831e-0424d9f38724\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-tx4s2" Oct 01 15:43:43 crc kubenswrapper[4869]: I1001 15:43:43.184547 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/f72fd252-cf04-4a98-831e-0424d9f38724-ceph\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-tx4s2\" (UID: \"f72fd252-cf04-4a98-831e-0424d9f38724\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-tx4s2" Oct 01 15:43:43 crc kubenswrapper[4869]: I1001 15:43:43.184673 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t842k\" (UniqueName: \"kubernetes.io/projected/f72fd252-cf04-4a98-831e-0424d9f38724-kube-api-access-t842k\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-tx4s2\" (UID: \"f72fd252-cf04-4a98-831e-0424d9f38724\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-tx4s2" Oct 01 15:43:43 crc kubenswrapper[4869]: I1001 15:43:43.184707 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f72fd252-cf04-4a98-831e-0424d9f38724-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-tx4s2\" (UID: \"f72fd252-cf04-4a98-831e-0424d9f38724\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-tx4s2" Oct 01 15:43:43 crc kubenswrapper[4869]: I1001 15:43:43.184757 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f72fd252-cf04-4a98-831e-0424d9f38724-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-tx4s2\" (UID: \"f72fd252-cf04-4a98-831e-0424d9f38724\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-tx4s2" Oct 01 15:43:43 crc kubenswrapper[4869]: I1001 15:43:43.184795 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f72fd252-cf04-4a98-831e-0424d9f38724-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-tx4s2\" (UID: \"f72fd252-cf04-4a98-831e-0424d9f38724\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-tx4s2" Oct 01 15:43:43 crc kubenswrapper[4869]: I1001 15:43:43.184818 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/f72fd252-cf04-4a98-831e-0424d9f38724-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-tx4s2\" (UID: 
\"f72fd252-cf04-4a98-831e-0424d9f38724\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-tx4s2" Oct 01 15:43:43 crc kubenswrapper[4869]: I1001 15:43:43.186086 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/f72fd252-cf04-4a98-831e-0424d9f38724-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-tx4s2\" (UID: \"f72fd252-cf04-4a98-831e-0424d9f38724\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-tx4s2" Oct 01 15:43:43 crc kubenswrapper[4869]: I1001 15:43:43.191567 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/f72fd252-cf04-4a98-831e-0424d9f38724-ceph\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-tx4s2\" (UID: \"f72fd252-cf04-4a98-831e-0424d9f38724\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-tx4s2" Oct 01 15:43:43 crc kubenswrapper[4869]: I1001 15:43:43.192280 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f72fd252-cf04-4a98-831e-0424d9f38724-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-tx4s2\" (UID: \"f72fd252-cf04-4a98-831e-0424d9f38724\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-tx4s2" Oct 01 15:43:43 crc kubenswrapper[4869]: I1001 15:43:43.192338 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f72fd252-cf04-4a98-831e-0424d9f38724-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-tx4s2\" (UID: \"f72fd252-cf04-4a98-831e-0424d9f38724\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-tx4s2" Oct 01 15:43:43 crc kubenswrapper[4869]: I1001 15:43:43.192864 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f72fd252-cf04-4a98-831e-0424d9f38724-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-tx4s2\" (UID: \"f72fd252-cf04-4a98-831e-0424d9f38724\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-tx4s2" Oct 01 15:43:43 crc kubenswrapper[4869]: I1001 15:43:43.203192 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t842k\" (UniqueName: \"kubernetes.io/projected/f72fd252-cf04-4a98-831e-0424d9f38724-kube-api-access-t842k\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-tx4s2\" (UID: \"f72fd252-cf04-4a98-831e-0424d9f38724\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-tx4s2" Oct 01 15:43:43 crc kubenswrapper[4869]: I1001 15:43:43.318447 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-tx4s2" Oct 01 15:43:43 crc kubenswrapper[4869]: I1001 15:43:43.882935 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-tx4s2"] Oct 01 15:43:43 crc kubenswrapper[4869]: I1001 15:43:43.920240 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-tx4s2" event={"ID":"f72fd252-cf04-4a98-831e-0424d9f38724","Type":"ContainerStarted","Data":"ea4275a0fcc97c4d7c3dc8cfb4dbaa61d3c8d1b4fbb6cb2b3225d95901357096"} Oct 01 15:43:44 crc kubenswrapper[4869]: I1001 15:43:44.930488 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-tx4s2" event={"ID":"f72fd252-cf04-4a98-831e-0424d9f38724","Type":"ContainerStarted","Data":"69c0f2255db8ad2f520bd74bf4597e16b7761921b128f97c6bfdb898e29d8217"} Oct 01 15:43:44 crc kubenswrapper[4869]: I1001 15:43:44.951788 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-tx4s2" podStartSLOduration=2.480527761 podStartE2EDuration="2.951769788s" podCreationTimestamp="2025-10-01 15:43:42 +0000 UTC" firstStartedPulling="2025-10-01 15:43:43.891048203 +0000 UTC m=+2333.037891359" lastFinishedPulling="2025-10-01 15:43:44.36229026 +0000 UTC m=+2333.509133386" observedRunningTime="2025-10-01 15:43:44.950573158 +0000 UTC m=+2334.097416304" watchObservedRunningTime="2025-10-01 15:43:44.951769788 +0000 UTC m=+2334.098612924" Oct 01 15:43:49 crc kubenswrapper[4869]: I1001 15:43:49.581376 4869 scope.go:117] "RemoveContainer" containerID="850fa9657d55ab61ed48a042dba1eb165240f62a294e8f87d32c9a3206247a54" Oct 01 15:43:49 crc kubenswrapper[4869]: E1001 15:43:49.583739 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 15:44:00 crc kubenswrapper[4869]: I1001 15:44:00.581477 4869 scope.go:117] "RemoveContainer" containerID="850fa9657d55ab61ed48a042dba1eb165240f62a294e8f87d32c9a3206247a54" Oct 01 15:44:00 crc kubenswrapper[4869]: E1001 15:44:00.582361 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 15:44:11 crc kubenswrapper[4869]: I1001 15:44:11.592629 4869 scope.go:117] "RemoveContainer" containerID="850fa9657d55ab61ed48a042dba1eb165240f62a294e8f87d32c9a3206247a54" Oct 01 15:44:11 crc kubenswrapper[4869]: E1001 15:44:11.593968 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" 
podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 15:44:25 crc kubenswrapper[4869]: I1001 15:44:25.581355 4869 scope.go:117] "RemoveContainer" containerID="850fa9657d55ab61ed48a042dba1eb165240f62a294e8f87d32c9a3206247a54" Oct 01 15:44:25 crc kubenswrapper[4869]: E1001 15:44:25.582123 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 15:44:39 crc kubenswrapper[4869]: I1001 15:44:39.581001 4869 scope.go:117] "RemoveContainer" containerID="850fa9657d55ab61ed48a042dba1eb165240f62a294e8f87d32c9a3206247a54" Oct 01 15:44:39 crc kubenswrapper[4869]: E1001 15:44:39.582027 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 15:44:51 crc kubenswrapper[4869]: I1001 15:44:51.627784 4869 generic.go:334] "Generic (PLEG): container finished" podID="f72fd252-cf04-4a98-831e-0424d9f38724" containerID="69c0f2255db8ad2f520bd74bf4597e16b7761921b128f97c6bfdb898e29d8217" exitCode=0 Oct 01 15:44:51 crc kubenswrapper[4869]: I1001 15:44:51.628362 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-tx4s2" event={"ID":"f72fd252-cf04-4a98-831e-0424d9f38724","Type":"ContainerDied","Data":"69c0f2255db8ad2f520bd74bf4597e16b7761921b128f97c6bfdb898e29d8217"} Oct 01 15:44:53 crc kubenswrapper[4869]: I1001 15:44:53.044841 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-tx4s2" Oct 01 15:44:53 crc kubenswrapper[4869]: I1001 15:44:53.084493 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/f72fd252-cf04-4a98-831e-0424d9f38724-ovncontroller-config-0\") pod \"f72fd252-cf04-4a98-831e-0424d9f38724\" (UID: \"f72fd252-cf04-4a98-831e-0424d9f38724\") " Oct 01 15:44:53 crc kubenswrapper[4869]: I1001 15:44:53.084533 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f72fd252-cf04-4a98-831e-0424d9f38724-ovn-combined-ca-bundle\") pod \"f72fd252-cf04-4a98-831e-0424d9f38724\" (UID: \"f72fd252-cf04-4a98-831e-0424d9f38724\") " Oct 01 15:44:53 crc kubenswrapper[4869]: I1001 15:44:53.090301 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f72fd252-cf04-4a98-831e-0424d9f38724-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "f72fd252-cf04-4a98-831e-0424d9f38724" (UID: "f72fd252-cf04-4a98-831e-0424d9f38724"). InnerVolumeSpecName "ovn-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:44:53 crc kubenswrapper[4869]: I1001 15:44:53.107117 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f72fd252-cf04-4a98-831e-0424d9f38724-ovncontroller-config-0" (OuterVolumeSpecName: "ovncontroller-config-0") pod "f72fd252-cf04-4a98-831e-0424d9f38724" (UID: "f72fd252-cf04-4a98-831e-0424d9f38724"). InnerVolumeSpecName "ovncontroller-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:44:53 crc kubenswrapper[4869]: I1001 15:44:53.185328 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f72fd252-cf04-4a98-831e-0424d9f38724-inventory\") pod \"f72fd252-cf04-4a98-831e-0424d9f38724\" (UID: \"f72fd252-cf04-4a98-831e-0424d9f38724\") " Oct 01 15:44:53 crc kubenswrapper[4869]: I1001 15:44:53.185380 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t842k\" (UniqueName: \"kubernetes.io/projected/f72fd252-cf04-4a98-831e-0424d9f38724-kube-api-access-t842k\") pod \"f72fd252-cf04-4a98-831e-0424d9f38724\" (UID: \"f72fd252-cf04-4a98-831e-0424d9f38724\") " Oct 01 15:44:53 crc kubenswrapper[4869]: I1001 15:44:53.185475 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/f72fd252-cf04-4a98-831e-0424d9f38724-ceph\") pod \"f72fd252-cf04-4a98-831e-0424d9f38724\" (UID: \"f72fd252-cf04-4a98-831e-0424d9f38724\") " Oct 01 15:44:53 crc kubenswrapper[4869]: I1001 15:44:53.185497 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f72fd252-cf04-4a98-831e-0424d9f38724-ssh-key\") pod \"f72fd252-cf04-4a98-831e-0424d9f38724\" (UID: \"f72fd252-cf04-4a98-831e-0424d9f38724\") " Oct 01 15:44:53 crc kubenswrapper[4869]: I1001 15:44:53.186220 4869 reconciler_common.go:293] "Volume detached for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/f72fd252-cf04-4a98-831e-0424d9f38724-ovncontroller-config-0\") on node \"crc\" DevicePath \"\"" Oct 01 15:44:53 crc kubenswrapper[4869]: I1001 15:44:53.186244 4869 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f72fd252-cf04-4a98-831e-0424d9f38724-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 15:44:53 crc kubenswrapper[4869]: I1001 15:44:53.188452 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f72fd252-cf04-4a98-831e-0424d9f38724-ceph" (OuterVolumeSpecName: "ceph") pod "f72fd252-cf04-4a98-831e-0424d9f38724" (UID: "f72fd252-cf04-4a98-831e-0424d9f38724"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:44:53 crc kubenswrapper[4869]: I1001 15:44:53.190298 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f72fd252-cf04-4a98-831e-0424d9f38724-kube-api-access-t842k" (OuterVolumeSpecName: "kube-api-access-t842k") pod "f72fd252-cf04-4a98-831e-0424d9f38724" (UID: "f72fd252-cf04-4a98-831e-0424d9f38724"). InnerVolumeSpecName "kube-api-access-t842k". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:44:53 crc kubenswrapper[4869]: I1001 15:44:53.207798 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f72fd252-cf04-4a98-831e-0424d9f38724-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "f72fd252-cf04-4a98-831e-0424d9f38724" (UID: "f72fd252-cf04-4a98-831e-0424d9f38724"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:44:53 crc kubenswrapper[4869]: I1001 15:44:53.225706 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f72fd252-cf04-4a98-831e-0424d9f38724-inventory" (OuterVolumeSpecName: "inventory") pod "f72fd252-cf04-4a98-831e-0424d9f38724" (UID: "f72fd252-cf04-4a98-831e-0424d9f38724"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:44:53 crc kubenswrapper[4869]: I1001 15:44:53.288393 4869 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f72fd252-cf04-4a98-831e-0424d9f38724-inventory\") on node \"crc\" DevicePath \"\"" Oct 01 15:44:53 crc kubenswrapper[4869]: I1001 15:44:53.288421 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t842k\" (UniqueName: \"kubernetes.io/projected/f72fd252-cf04-4a98-831e-0424d9f38724-kube-api-access-t842k\") on node \"crc\" DevicePath \"\"" Oct 01 15:44:53 crc kubenswrapper[4869]: I1001 15:44:53.288430 4869 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/f72fd252-cf04-4a98-831e-0424d9f38724-ceph\") on node \"crc\" DevicePath \"\"" Oct 01 15:44:53 crc kubenswrapper[4869]: I1001 15:44:53.288448 4869 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f72fd252-cf04-4a98-831e-0424d9f38724-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 01 15:44:53 crc kubenswrapper[4869]: I1001 15:44:53.652006 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-tx4s2" event={"ID":"f72fd252-cf04-4a98-831e-0424d9f38724","Type":"ContainerDied","Data":"ea4275a0fcc97c4d7c3dc8cfb4dbaa61d3c8d1b4fbb6cb2b3225d95901357096"} Oct 01 15:44:53 crc kubenswrapper[4869]: I1001 15:44:53.652059 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ea4275a0fcc97c4d7c3dc8cfb4dbaa61d3c8d1b4fbb6cb2b3225d95901357096" Oct 01 15:44:53 crc kubenswrapper[4869]: I1001 15:44:53.652093 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-tx4s2" Oct 01 15:44:53 crc kubenswrapper[4869]: I1001 15:44:53.808163 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-bgt6n"] Oct 01 15:44:53 crc kubenswrapper[4869]: E1001 15:44:53.808900 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f72fd252-cf04-4a98-831e-0424d9f38724" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Oct 01 15:44:53 crc kubenswrapper[4869]: I1001 15:44:53.809007 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="f72fd252-cf04-4a98-831e-0424d9f38724" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Oct 01 15:44:53 crc kubenswrapper[4869]: I1001 15:44:53.809415 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="f72fd252-cf04-4a98-831e-0424d9f38724" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Oct 01 15:44:53 crc kubenswrapper[4869]: I1001 15:44:53.810174 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-bgt6n" Oct 01 15:44:53 crc kubenswrapper[4869]: I1001 15:44:53.812570 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 01 15:44:53 crc kubenswrapper[4869]: I1001 15:44:53.812849 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-neutron-config" Oct 01 15:44:53 crc kubenswrapper[4869]: I1001 15:44:53.812986 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Oct 01 15:44:53 crc kubenswrapper[4869]: I1001 15:44:53.813117 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 01 15:44:53 crc kubenswrapper[4869]: I1001 15:44:53.813421 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 01 15:44:53 crc kubenswrapper[4869]: I1001 15:44:53.813565 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-cjg8g" Oct 01 15:44:53 crc kubenswrapper[4869]: I1001 15:44:53.815456 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-ovn-metadata-agent-neutron-config" Oct 01 15:44:53 crc kubenswrapper[4869]: I1001 15:44:53.834324 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-bgt6n"] Oct 01 15:44:53 crc kubenswrapper[4869]: I1001 15:44:53.899188 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/acf87e0a-4aea-4c68-b46c-d5397c47e5b3-ceph\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-bgt6n\" (UID: \"acf87e0a-4aea-4c68-b46c-d5397c47e5b3\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-bgt6n" Oct 01 15:44:53 crc kubenswrapper[4869]: I1001 15:44:53.899286 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/acf87e0a-4aea-4c68-b46c-d5397c47e5b3-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-bgt6n\" (UID: \"acf87e0a-4aea-4c68-b46c-d5397c47e5b3\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-bgt6n" Oct 01 15:44:53 crc kubenswrapper[4869]: I1001 15:44:53.899357 
4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/acf87e0a-4aea-4c68-b46c-d5397c47e5b3-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-bgt6n\" (UID: \"acf87e0a-4aea-4c68-b46c-d5397c47e5b3\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-bgt6n" Oct 01 15:44:53 crc kubenswrapper[4869]: I1001 15:44:53.899459 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5kdjc\" (UniqueName: \"kubernetes.io/projected/acf87e0a-4aea-4c68-b46c-d5397c47e5b3-kube-api-access-5kdjc\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-bgt6n\" (UID: \"acf87e0a-4aea-4c68-b46c-d5397c47e5b3\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-bgt6n" Oct 01 15:44:53 crc kubenswrapper[4869]: I1001 15:44:53.899498 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/acf87e0a-4aea-4c68-b46c-d5397c47e5b3-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-bgt6n\" (UID: \"acf87e0a-4aea-4c68-b46c-d5397c47e5b3\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-bgt6n" Oct 01 15:44:53 crc kubenswrapper[4869]: I1001 15:44:53.899583 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/acf87e0a-4aea-4c68-b46c-d5397c47e5b3-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-bgt6n\" (UID: \"acf87e0a-4aea-4c68-b46c-d5397c47e5b3\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-bgt6n" Oct 01 15:44:53 crc kubenswrapper[4869]: I1001 15:44:53.899607 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/acf87e0a-4aea-4c68-b46c-d5397c47e5b3-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-bgt6n\" (UID: \"acf87e0a-4aea-4c68-b46c-d5397c47e5b3\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-bgt6n" Oct 01 15:44:54 crc kubenswrapper[4869]: I1001 15:44:54.001169 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/acf87e0a-4aea-4c68-b46c-d5397c47e5b3-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-bgt6n\" (UID: \"acf87e0a-4aea-4c68-b46c-d5397c47e5b3\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-bgt6n" Oct 01 15:44:54 crc kubenswrapper[4869]: I1001 15:44:54.001283 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/acf87e0a-4aea-4c68-b46c-d5397c47e5b3-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-bgt6n\" (UID: \"acf87e0a-4aea-4c68-b46c-d5397c47e5b3\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-bgt6n" Oct 01 15:44:54 crc kubenswrapper[4869]: I1001 15:44:54.001365 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5kdjc\" (UniqueName: 
\"kubernetes.io/projected/acf87e0a-4aea-4c68-b46c-d5397c47e5b3-kube-api-access-5kdjc\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-bgt6n\" (UID: \"acf87e0a-4aea-4c68-b46c-d5397c47e5b3\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-bgt6n" Oct 01 15:44:54 crc kubenswrapper[4869]: I1001 15:44:54.001402 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/acf87e0a-4aea-4c68-b46c-d5397c47e5b3-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-bgt6n\" (UID: \"acf87e0a-4aea-4c68-b46c-d5397c47e5b3\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-bgt6n" Oct 01 15:44:54 crc kubenswrapper[4869]: I1001 15:44:54.001491 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/acf87e0a-4aea-4c68-b46c-d5397c47e5b3-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-bgt6n\" (UID: \"acf87e0a-4aea-4c68-b46c-d5397c47e5b3\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-bgt6n" Oct 01 15:44:54 crc kubenswrapper[4869]: I1001 15:44:54.001527 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/acf87e0a-4aea-4c68-b46c-d5397c47e5b3-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-bgt6n\" (UID: \"acf87e0a-4aea-4c68-b46c-d5397c47e5b3\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-bgt6n" Oct 01 15:44:54 crc kubenswrapper[4869]: I1001 15:44:54.001612 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/acf87e0a-4aea-4c68-b46c-d5397c47e5b3-ceph\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-bgt6n\" (UID: \"acf87e0a-4aea-4c68-b46c-d5397c47e5b3\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-bgt6n" Oct 01 15:44:54 crc kubenswrapper[4869]: I1001 15:44:54.005877 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/acf87e0a-4aea-4c68-b46c-d5397c47e5b3-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-bgt6n\" (UID: \"acf87e0a-4aea-4c68-b46c-d5397c47e5b3\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-bgt6n" Oct 01 15:44:54 crc kubenswrapper[4869]: I1001 15:44:54.005907 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/acf87e0a-4aea-4c68-b46c-d5397c47e5b3-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-bgt6n\" (UID: \"acf87e0a-4aea-4c68-b46c-d5397c47e5b3\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-bgt6n" Oct 01 15:44:54 crc kubenswrapper[4869]: I1001 15:44:54.006179 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/acf87e0a-4aea-4c68-b46c-d5397c47e5b3-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-bgt6n\" (UID: \"acf87e0a-4aea-4c68-b46c-d5397c47e5b3\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-bgt6n" Oct 01 15:44:54 crc kubenswrapper[4869]: I1001 15:44:54.007575 4869 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/acf87e0a-4aea-4c68-b46c-d5397c47e5b3-ceph\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-bgt6n\" (UID: \"acf87e0a-4aea-4c68-b46c-d5397c47e5b3\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-bgt6n" Oct 01 15:44:54 crc kubenswrapper[4869]: I1001 15:44:54.007626 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/acf87e0a-4aea-4c68-b46c-d5397c47e5b3-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-bgt6n\" (UID: \"acf87e0a-4aea-4c68-b46c-d5397c47e5b3\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-bgt6n" Oct 01 15:44:54 crc kubenswrapper[4869]: I1001 15:44:54.009027 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/acf87e0a-4aea-4c68-b46c-d5397c47e5b3-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-bgt6n\" (UID: \"acf87e0a-4aea-4c68-b46c-d5397c47e5b3\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-bgt6n" Oct 01 15:44:54 crc kubenswrapper[4869]: I1001 15:44:54.027877 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5kdjc\" (UniqueName: \"kubernetes.io/projected/acf87e0a-4aea-4c68-b46c-d5397c47e5b3-kube-api-access-5kdjc\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-bgt6n\" (UID: \"acf87e0a-4aea-4c68-b46c-d5397c47e5b3\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-bgt6n" Oct 01 15:44:54 crc kubenswrapper[4869]: I1001 15:44:54.137982 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-bgt6n" Oct 01 15:44:54 crc kubenswrapper[4869]: I1001 15:44:54.582244 4869 scope.go:117] "RemoveContainer" containerID="850fa9657d55ab61ed48a042dba1eb165240f62a294e8f87d32c9a3206247a54" Oct 01 15:44:54 crc kubenswrapper[4869]: E1001 15:44:54.583289 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 15:44:54 crc kubenswrapper[4869]: I1001 15:44:54.682109 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-bgt6n"] Oct 01 15:44:55 crc kubenswrapper[4869]: I1001 15:44:55.673366 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-bgt6n" event={"ID":"acf87e0a-4aea-4c68-b46c-d5397c47e5b3","Type":"ContainerStarted","Data":"1119f0b93def0702c5c477d9c4d1092d0cf1257e149d0d80f8d21fe290b848ef"} Oct 01 15:44:55 crc kubenswrapper[4869]: I1001 15:44:55.673617 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-bgt6n" event={"ID":"acf87e0a-4aea-4c68-b46c-d5397c47e5b3","Type":"ContainerStarted","Data":"2866dc8911cb33488902e157f2bfdf9d909de386102b0222d56bd66cc3cdcd12"} Oct 01 15:45:00 crc kubenswrapper[4869]: I1001 15:45:00.140854 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-bgt6n" podStartSLOduration=6.593365895 podStartE2EDuration="7.14081488s" podCreationTimestamp="2025-10-01 15:44:53 +0000 UTC" firstStartedPulling="2025-10-01 15:44:54.68948148 +0000 UTC m=+2403.836324636" lastFinishedPulling="2025-10-01 15:44:55.236930475 +0000 UTC m=+2404.383773621" observedRunningTime="2025-10-01 15:44:55.691811118 +0000 UTC m=+2404.838654284" watchObservedRunningTime="2025-10-01 15:45:00.14081488 +0000 UTC m=+2409.287658076" Oct 01 15:45:00 crc kubenswrapper[4869]: I1001 15:45:00.148307 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29322225-2hwr8"] Oct 01 15:45:00 crc kubenswrapper[4869]: I1001 15:45:00.149931 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29322225-2hwr8" Oct 01 15:45:00 crc kubenswrapper[4869]: I1001 15:45:00.152656 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 01 15:45:00 crc kubenswrapper[4869]: I1001 15:45:00.153688 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 01 15:45:00 crc kubenswrapper[4869]: I1001 15:45:00.157577 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29322225-2hwr8"] Oct 01 15:45:00 crc kubenswrapper[4869]: I1001 15:45:00.236380 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3d379718-8ee6-4e42-8d61-01f74beb9e0a-secret-volume\") pod \"collect-profiles-29322225-2hwr8\" (UID: \"3d379718-8ee6-4e42-8d61-01f74beb9e0a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322225-2hwr8" Oct 01 15:45:00 crc kubenswrapper[4869]: I1001 15:45:00.236470 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xjkcp\" (UniqueName: \"kubernetes.io/projected/3d379718-8ee6-4e42-8d61-01f74beb9e0a-kube-api-access-xjkcp\") pod \"collect-profiles-29322225-2hwr8\" (UID: \"3d379718-8ee6-4e42-8d61-01f74beb9e0a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322225-2hwr8" Oct 01 15:45:00 crc kubenswrapper[4869]: I1001 15:45:00.236520 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3d379718-8ee6-4e42-8d61-01f74beb9e0a-config-volume\") pod \"collect-profiles-29322225-2hwr8\" (UID: \"3d379718-8ee6-4e42-8d61-01f74beb9e0a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322225-2hwr8" Oct 01 15:45:00 crc kubenswrapper[4869]: I1001 15:45:00.338544 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3d379718-8ee6-4e42-8d61-01f74beb9e0a-secret-volume\") pod \"collect-profiles-29322225-2hwr8\" (UID: \"3d379718-8ee6-4e42-8d61-01f74beb9e0a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322225-2hwr8" Oct 01 15:45:00 crc kubenswrapper[4869]: I1001 15:45:00.338613 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xjkcp\" (UniqueName: \"kubernetes.io/projected/3d379718-8ee6-4e42-8d61-01f74beb9e0a-kube-api-access-xjkcp\") pod \"collect-profiles-29322225-2hwr8\" (UID: \"3d379718-8ee6-4e42-8d61-01f74beb9e0a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322225-2hwr8" Oct 01 15:45:00 crc kubenswrapper[4869]: I1001 15:45:00.338651 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3d379718-8ee6-4e42-8d61-01f74beb9e0a-config-volume\") pod \"collect-profiles-29322225-2hwr8\" (UID: \"3d379718-8ee6-4e42-8d61-01f74beb9e0a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322225-2hwr8" Oct 01 15:45:00 crc kubenswrapper[4869]: I1001 15:45:00.339539 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3d379718-8ee6-4e42-8d61-01f74beb9e0a-config-volume\") pod 
\"collect-profiles-29322225-2hwr8\" (UID: \"3d379718-8ee6-4e42-8d61-01f74beb9e0a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322225-2hwr8" Oct 01 15:45:00 crc kubenswrapper[4869]: I1001 15:45:00.355663 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3d379718-8ee6-4e42-8d61-01f74beb9e0a-secret-volume\") pod \"collect-profiles-29322225-2hwr8\" (UID: \"3d379718-8ee6-4e42-8d61-01f74beb9e0a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322225-2hwr8" Oct 01 15:45:00 crc kubenswrapper[4869]: I1001 15:45:00.357596 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xjkcp\" (UniqueName: \"kubernetes.io/projected/3d379718-8ee6-4e42-8d61-01f74beb9e0a-kube-api-access-xjkcp\") pod \"collect-profiles-29322225-2hwr8\" (UID: \"3d379718-8ee6-4e42-8d61-01f74beb9e0a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322225-2hwr8" Oct 01 15:45:00 crc kubenswrapper[4869]: I1001 15:45:00.485152 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29322225-2hwr8" Oct 01 15:45:00 crc kubenswrapper[4869]: I1001 15:45:00.995761 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29322225-2hwr8"] Oct 01 15:45:01 crc kubenswrapper[4869]: I1001 15:45:01.744349 4869 generic.go:334] "Generic (PLEG): container finished" podID="3d379718-8ee6-4e42-8d61-01f74beb9e0a" containerID="93cb852400d436873f3210fc6f8f028e1ab9f3f05ad5d495f8c0fc8fdad210e7" exitCode=0 Oct 01 15:45:01 crc kubenswrapper[4869]: I1001 15:45:01.744420 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29322225-2hwr8" event={"ID":"3d379718-8ee6-4e42-8d61-01f74beb9e0a","Type":"ContainerDied","Data":"93cb852400d436873f3210fc6f8f028e1ab9f3f05ad5d495f8c0fc8fdad210e7"} Oct 01 15:45:01 crc kubenswrapper[4869]: I1001 15:45:01.744631 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29322225-2hwr8" event={"ID":"3d379718-8ee6-4e42-8d61-01f74beb9e0a","Type":"ContainerStarted","Data":"0c8b2d7e314837cd5c83d2d956955d744844fbf95a5fed92594e0b17b70ff722"} Oct 01 15:45:03 crc kubenswrapper[4869]: I1001 15:45:03.085428 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29322225-2hwr8" Oct 01 15:45:03 crc kubenswrapper[4869]: I1001 15:45:03.188719 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3d379718-8ee6-4e42-8d61-01f74beb9e0a-secret-volume\") pod \"3d379718-8ee6-4e42-8d61-01f74beb9e0a\" (UID: \"3d379718-8ee6-4e42-8d61-01f74beb9e0a\") " Oct 01 15:45:03 crc kubenswrapper[4869]: I1001 15:45:03.189056 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3d379718-8ee6-4e42-8d61-01f74beb9e0a-config-volume\") pod \"3d379718-8ee6-4e42-8d61-01f74beb9e0a\" (UID: \"3d379718-8ee6-4e42-8d61-01f74beb9e0a\") " Oct 01 15:45:03 crc kubenswrapper[4869]: I1001 15:45:03.189112 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xjkcp\" (UniqueName: \"kubernetes.io/projected/3d379718-8ee6-4e42-8d61-01f74beb9e0a-kube-api-access-xjkcp\") pod \"3d379718-8ee6-4e42-8d61-01f74beb9e0a\" (UID: \"3d379718-8ee6-4e42-8d61-01f74beb9e0a\") " Oct 01 15:45:03 crc kubenswrapper[4869]: I1001 15:45:03.189643 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3d379718-8ee6-4e42-8d61-01f74beb9e0a-config-volume" (OuterVolumeSpecName: "config-volume") pod "3d379718-8ee6-4e42-8d61-01f74beb9e0a" (UID: "3d379718-8ee6-4e42-8d61-01f74beb9e0a"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:45:03 crc kubenswrapper[4869]: I1001 15:45:03.195055 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3d379718-8ee6-4e42-8d61-01f74beb9e0a-kube-api-access-xjkcp" (OuterVolumeSpecName: "kube-api-access-xjkcp") pod "3d379718-8ee6-4e42-8d61-01f74beb9e0a" (UID: "3d379718-8ee6-4e42-8d61-01f74beb9e0a"). InnerVolumeSpecName "kube-api-access-xjkcp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:45:03 crc kubenswrapper[4869]: I1001 15:45:03.195680 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3d379718-8ee6-4e42-8d61-01f74beb9e0a-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "3d379718-8ee6-4e42-8d61-01f74beb9e0a" (UID: "3d379718-8ee6-4e42-8d61-01f74beb9e0a"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:45:03 crc kubenswrapper[4869]: I1001 15:45:03.292188 4869 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3d379718-8ee6-4e42-8d61-01f74beb9e0a-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 01 15:45:03 crc kubenswrapper[4869]: I1001 15:45:03.292245 4869 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3d379718-8ee6-4e42-8d61-01f74beb9e0a-config-volume\") on node \"crc\" DevicePath \"\"" Oct 01 15:45:03 crc kubenswrapper[4869]: I1001 15:45:03.292288 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xjkcp\" (UniqueName: \"kubernetes.io/projected/3d379718-8ee6-4e42-8d61-01f74beb9e0a-kube-api-access-xjkcp\") on node \"crc\" DevicePath \"\"" Oct 01 15:45:03 crc kubenswrapper[4869]: I1001 15:45:03.767287 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29322225-2hwr8" event={"ID":"3d379718-8ee6-4e42-8d61-01f74beb9e0a","Type":"ContainerDied","Data":"0c8b2d7e314837cd5c83d2d956955d744844fbf95a5fed92594e0b17b70ff722"} Oct 01 15:45:03 crc kubenswrapper[4869]: I1001 15:45:03.767322 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29322225-2hwr8" Oct 01 15:45:03 crc kubenswrapper[4869]: I1001 15:45:03.767332 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0c8b2d7e314837cd5c83d2d956955d744844fbf95a5fed92594e0b17b70ff722" Oct 01 15:45:04 crc kubenswrapper[4869]: I1001 15:45:04.147632 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29322180-8fxq6"] Oct 01 15:45:04 crc kubenswrapper[4869]: I1001 15:45:04.154396 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29322180-8fxq6"] Oct 01 15:45:05 crc kubenswrapper[4869]: I1001 15:45:05.613460 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4c1dcd57-cee2-45ab-9a92-8cdd8b864f98" path="/var/lib/kubelet/pods/4c1dcd57-cee2-45ab-9a92-8cdd8b864f98/volumes" Oct 01 15:45:06 crc kubenswrapper[4869]: I1001 15:45:06.581897 4869 scope.go:117] "RemoveContainer" containerID="850fa9657d55ab61ed48a042dba1eb165240f62a294e8f87d32c9a3206247a54" Oct 01 15:45:06 crc kubenswrapper[4869]: E1001 15:45:06.582309 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 15:45:15 crc kubenswrapper[4869]: I1001 15:45:15.005215 4869 scope.go:117] "RemoveContainer" containerID="b9697e7378c7c6e7287e428875324fbb361baef8c62aa101b17c958416a63273" Oct 01 15:45:19 crc kubenswrapper[4869]: I1001 15:45:19.582317 4869 scope.go:117] "RemoveContainer" containerID="850fa9657d55ab61ed48a042dba1eb165240f62a294e8f87d32c9a3206247a54" Oct 01 15:45:19 crc kubenswrapper[4869]: I1001 15:45:19.929987 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" 
event={"ID":"a4b64f7f-0b03-4f47-965b-9fde048b735c","Type":"ContainerStarted","Data":"25093fbe92e6ae652a3af4c4b9e03e2282557f8fc51b27a9e7955934416a4f59"} Oct 01 15:45:58 crc kubenswrapper[4869]: I1001 15:45:58.329569 4869 generic.go:334] "Generic (PLEG): container finished" podID="acf87e0a-4aea-4c68-b46c-d5397c47e5b3" containerID="1119f0b93def0702c5c477d9c4d1092d0cf1257e149d0d80f8d21fe290b848ef" exitCode=0 Oct 01 15:45:58 crc kubenswrapper[4869]: I1001 15:45:58.329660 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-bgt6n" event={"ID":"acf87e0a-4aea-4c68-b46c-d5397c47e5b3","Type":"ContainerDied","Data":"1119f0b93def0702c5c477d9c4d1092d0cf1257e149d0d80f8d21fe290b848ef"} Oct 01 15:45:59 crc kubenswrapper[4869]: I1001 15:45:59.883945 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-bgt6n" Oct 01 15:45:59 crc kubenswrapper[4869]: I1001 15:45:59.971015 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/acf87e0a-4aea-4c68-b46c-d5397c47e5b3-ceph\") pod \"acf87e0a-4aea-4c68-b46c-d5397c47e5b3\" (UID: \"acf87e0a-4aea-4c68-b46c-d5397c47e5b3\") " Oct 01 15:45:59 crc kubenswrapper[4869]: I1001 15:45:59.971456 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/acf87e0a-4aea-4c68-b46c-d5397c47e5b3-nova-metadata-neutron-config-0\") pod \"acf87e0a-4aea-4c68-b46c-d5397c47e5b3\" (UID: \"acf87e0a-4aea-4c68-b46c-d5397c47e5b3\") " Oct 01 15:45:59 crc kubenswrapper[4869]: I1001 15:45:59.971511 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/acf87e0a-4aea-4c68-b46c-d5397c47e5b3-neutron-ovn-metadata-agent-neutron-config-0\") pod \"acf87e0a-4aea-4c68-b46c-d5397c47e5b3\" (UID: \"acf87e0a-4aea-4c68-b46c-d5397c47e5b3\") " Oct 01 15:45:59 crc kubenswrapper[4869]: I1001 15:45:59.971592 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/acf87e0a-4aea-4c68-b46c-d5397c47e5b3-neutron-metadata-combined-ca-bundle\") pod \"acf87e0a-4aea-4c68-b46c-d5397c47e5b3\" (UID: \"acf87e0a-4aea-4c68-b46c-d5397c47e5b3\") " Oct 01 15:45:59 crc kubenswrapper[4869]: I1001 15:45:59.971677 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/acf87e0a-4aea-4c68-b46c-d5397c47e5b3-ssh-key\") pod \"acf87e0a-4aea-4c68-b46c-d5397c47e5b3\" (UID: \"acf87e0a-4aea-4c68-b46c-d5397c47e5b3\") " Oct 01 15:45:59 crc kubenswrapper[4869]: I1001 15:45:59.971776 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/acf87e0a-4aea-4c68-b46c-d5397c47e5b3-inventory\") pod \"acf87e0a-4aea-4c68-b46c-d5397c47e5b3\" (UID: \"acf87e0a-4aea-4c68-b46c-d5397c47e5b3\") " Oct 01 15:45:59 crc kubenswrapper[4869]: I1001 15:45:59.971848 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5kdjc\" (UniqueName: \"kubernetes.io/projected/acf87e0a-4aea-4c68-b46c-d5397c47e5b3-kube-api-access-5kdjc\") pod \"acf87e0a-4aea-4c68-b46c-d5397c47e5b3\" (UID: \"acf87e0a-4aea-4c68-b46c-d5397c47e5b3\") " Oct 01 15:45:59 crc 
kubenswrapper[4869]: I1001 15:45:59.991150 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/acf87e0a-4aea-4c68-b46c-d5397c47e5b3-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "acf87e0a-4aea-4c68-b46c-d5397c47e5b3" (UID: "acf87e0a-4aea-4c68-b46c-d5397c47e5b3"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:45:59 crc kubenswrapper[4869]: I1001 15:45:59.995787 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/acf87e0a-4aea-4c68-b46c-d5397c47e5b3-ceph" (OuterVolumeSpecName: "ceph") pod "acf87e0a-4aea-4c68-b46c-d5397c47e5b3" (UID: "acf87e0a-4aea-4c68-b46c-d5397c47e5b3"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:45:59 crc kubenswrapper[4869]: I1001 15:45:59.997762 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/acf87e0a-4aea-4c68-b46c-d5397c47e5b3-kube-api-access-5kdjc" (OuterVolumeSpecName: "kube-api-access-5kdjc") pod "acf87e0a-4aea-4c68-b46c-d5397c47e5b3" (UID: "acf87e0a-4aea-4c68-b46c-d5397c47e5b3"). InnerVolumeSpecName "kube-api-access-5kdjc". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:46:00 crc kubenswrapper[4869]: I1001 15:46:00.009352 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/acf87e0a-4aea-4c68-b46c-d5397c47e5b3-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "acf87e0a-4aea-4c68-b46c-d5397c47e5b3" (UID: "acf87e0a-4aea-4c68-b46c-d5397c47e5b3"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:46:00 crc kubenswrapper[4869]: I1001 15:46:00.009769 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/acf87e0a-4aea-4c68-b46c-d5397c47e5b3-nova-metadata-neutron-config-0" (OuterVolumeSpecName: "nova-metadata-neutron-config-0") pod "acf87e0a-4aea-4c68-b46c-d5397c47e5b3" (UID: "acf87e0a-4aea-4c68-b46c-d5397c47e5b3"). InnerVolumeSpecName "nova-metadata-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:46:00 crc kubenswrapper[4869]: I1001 15:46:00.012873 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/acf87e0a-4aea-4c68-b46c-d5397c47e5b3-neutron-ovn-metadata-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-ovn-metadata-agent-neutron-config-0") pod "acf87e0a-4aea-4c68-b46c-d5397c47e5b3" (UID: "acf87e0a-4aea-4c68-b46c-d5397c47e5b3"). InnerVolumeSpecName "neutron-ovn-metadata-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:46:00 crc kubenswrapper[4869]: I1001 15:46:00.029490 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/acf87e0a-4aea-4c68-b46c-d5397c47e5b3-inventory" (OuterVolumeSpecName: "inventory") pod "acf87e0a-4aea-4c68-b46c-d5397c47e5b3" (UID: "acf87e0a-4aea-4c68-b46c-d5397c47e5b3"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:46:00 crc kubenswrapper[4869]: I1001 15:46:00.074516 4869 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/acf87e0a-4aea-4c68-b46c-d5397c47e5b3-ceph\") on node \"crc\" DevicePath \"\"" Oct 01 15:46:00 crc kubenswrapper[4869]: I1001 15:46:00.074553 4869 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/acf87e0a-4aea-4c68-b46c-d5397c47e5b3-nova-metadata-neutron-config-0\") on node \"crc\" DevicePath \"\"" Oct 01 15:46:00 crc kubenswrapper[4869]: I1001 15:46:00.074566 4869 reconciler_common.go:293] "Volume detached for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/acf87e0a-4aea-4c68-b46c-d5397c47e5b3-neutron-ovn-metadata-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Oct 01 15:46:00 crc kubenswrapper[4869]: I1001 15:46:00.074578 4869 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/acf87e0a-4aea-4c68-b46c-d5397c47e5b3-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 15:46:00 crc kubenswrapper[4869]: I1001 15:46:00.074588 4869 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/acf87e0a-4aea-4c68-b46c-d5397c47e5b3-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 01 15:46:00 crc kubenswrapper[4869]: I1001 15:46:00.074596 4869 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/acf87e0a-4aea-4c68-b46c-d5397c47e5b3-inventory\") on node \"crc\" DevicePath \"\"" Oct 01 15:46:00 crc kubenswrapper[4869]: I1001 15:46:00.074605 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5kdjc\" (UniqueName: \"kubernetes.io/projected/acf87e0a-4aea-4c68-b46c-d5397c47e5b3-kube-api-access-5kdjc\") on node \"crc\" DevicePath \"\"" Oct 01 15:46:00 crc kubenswrapper[4869]: I1001 15:46:00.366895 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-bgt6n" event={"ID":"acf87e0a-4aea-4c68-b46c-d5397c47e5b3","Type":"ContainerDied","Data":"2866dc8911cb33488902e157f2bfdf9d909de386102b0222d56bd66cc3cdcd12"} Oct 01 15:46:00 crc kubenswrapper[4869]: I1001 15:46:00.366960 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2866dc8911cb33488902e157f2bfdf9d909de386102b0222d56bd66cc3cdcd12" Oct 01 15:46:00 crc kubenswrapper[4869]: I1001 15:46:00.367442 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-bgt6n" Oct 01 15:46:00 crc kubenswrapper[4869]: I1001 15:46:00.479785 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4m5m5"] Oct 01 15:46:00 crc kubenswrapper[4869]: E1001 15:46:00.480498 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="acf87e0a-4aea-4c68-b46c-d5397c47e5b3" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Oct 01 15:46:00 crc kubenswrapper[4869]: I1001 15:46:00.480531 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="acf87e0a-4aea-4c68-b46c-d5397c47e5b3" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Oct 01 15:46:00 crc kubenswrapper[4869]: E1001 15:46:00.480563 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d379718-8ee6-4e42-8d61-01f74beb9e0a" containerName="collect-profiles" Oct 01 15:46:00 crc kubenswrapper[4869]: I1001 15:46:00.480577 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d379718-8ee6-4e42-8d61-01f74beb9e0a" containerName="collect-profiles" Oct 01 15:46:00 crc kubenswrapper[4869]: I1001 15:46:00.480953 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="acf87e0a-4aea-4c68-b46c-d5397c47e5b3" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Oct 01 15:46:00 crc kubenswrapper[4869]: I1001 15:46:00.480990 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="3d379718-8ee6-4e42-8d61-01f74beb9e0a" containerName="collect-profiles" Oct 01 15:46:00 crc kubenswrapper[4869]: I1001 15:46:00.482343 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4m5m5" Oct 01 15:46:00 crc kubenswrapper[4869]: I1001 15:46:00.488935 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 01 15:46:00 crc kubenswrapper[4869]: I1001 15:46:00.489072 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 01 15:46:00 crc kubenswrapper[4869]: I1001 15:46:00.489242 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"libvirt-secret" Oct 01 15:46:00 crc kubenswrapper[4869]: I1001 15:46:00.489412 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-cjg8g" Oct 01 15:46:00 crc kubenswrapper[4869]: I1001 15:46:00.492813 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Oct 01 15:46:00 crc kubenswrapper[4869]: I1001 15:46:00.492837 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 01 15:46:00 crc kubenswrapper[4869]: I1001 15:46:00.495725 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4m5m5"] Oct 01 15:46:00 crc kubenswrapper[4869]: I1001 15:46:00.582340 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/49bff277-b5e6-4b61-b964-2a615ff1cf94-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-4m5m5\" (UID: \"49bff277-b5e6-4b61-b964-2a615ff1cf94\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4m5m5" Oct 01 15:46:00 crc kubenswrapper[4869]: I1001 15:46:00.582453 4869 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/49bff277-b5e6-4b61-b964-2a615ff1cf94-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-4m5m5\" (UID: \"49bff277-b5e6-4b61-b964-2a615ff1cf94\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4m5m5" Oct 01 15:46:00 crc kubenswrapper[4869]: I1001 15:46:00.582507 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/49bff277-b5e6-4b61-b964-2a615ff1cf94-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-4m5m5\" (UID: \"49bff277-b5e6-4b61-b964-2a615ff1cf94\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4m5m5" Oct 01 15:46:00 crc kubenswrapper[4869]: I1001 15:46:00.582534 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/49bff277-b5e6-4b61-b964-2a615ff1cf94-ceph\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-4m5m5\" (UID: \"49bff277-b5e6-4b61-b964-2a615ff1cf94\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4m5m5" Oct 01 15:46:00 crc kubenswrapper[4869]: I1001 15:46:00.582604 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6vcpm\" (UniqueName: \"kubernetes.io/projected/49bff277-b5e6-4b61-b964-2a615ff1cf94-kube-api-access-6vcpm\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-4m5m5\" (UID: \"49bff277-b5e6-4b61-b964-2a615ff1cf94\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4m5m5" Oct 01 15:46:00 crc kubenswrapper[4869]: I1001 15:46:00.582671 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/49bff277-b5e6-4b61-b964-2a615ff1cf94-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-4m5m5\" (UID: \"49bff277-b5e6-4b61-b964-2a615ff1cf94\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4m5m5" Oct 01 15:46:00 crc kubenswrapper[4869]: I1001 15:46:00.684027 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/49bff277-b5e6-4b61-b964-2a615ff1cf94-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-4m5m5\" (UID: \"49bff277-b5e6-4b61-b964-2a615ff1cf94\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4m5m5" Oct 01 15:46:00 crc kubenswrapper[4869]: I1001 15:46:00.684147 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/49bff277-b5e6-4b61-b964-2a615ff1cf94-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-4m5m5\" (UID: \"49bff277-b5e6-4b61-b964-2a615ff1cf94\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4m5m5" Oct 01 15:46:00 crc kubenswrapper[4869]: I1001 15:46:00.685856 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/49bff277-b5e6-4b61-b964-2a615ff1cf94-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-4m5m5\" (UID: \"49bff277-b5e6-4b61-b964-2a615ff1cf94\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4m5m5" Oct 01 15:46:00 crc kubenswrapper[4869]: I1001 15:46:00.686100 4869 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/49bff277-b5e6-4b61-b964-2a615ff1cf94-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-4m5m5\" (UID: \"49bff277-b5e6-4b61-b964-2a615ff1cf94\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4m5m5" Oct 01 15:46:00 crc kubenswrapper[4869]: I1001 15:46:00.686148 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/49bff277-b5e6-4b61-b964-2a615ff1cf94-ceph\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-4m5m5\" (UID: \"49bff277-b5e6-4b61-b964-2a615ff1cf94\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4m5m5" Oct 01 15:46:00 crc kubenswrapper[4869]: I1001 15:46:00.686458 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6vcpm\" (UniqueName: \"kubernetes.io/projected/49bff277-b5e6-4b61-b964-2a615ff1cf94-kube-api-access-6vcpm\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-4m5m5\" (UID: \"49bff277-b5e6-4b61-b964-2a615ff1cf94\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4m5m5" Oct 01 15:46:00 crc kubenswrapper[4869]: I1001 15:46:00.689662 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/49bff277-b5e6-4b61-b964-2a615ff1cf94-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-4m5m5\" (UID: \"49bff277-b5e6-4b61-b964-2a615ff1cf94\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4m5m5" Oct 01 15:46:00 crc kubenswrapper[4869]: I1001 15:46:00.689807 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/49bff277-b5e6-4b61-b964-2a615ff1cf94-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-4m5m5\" (UID: \"49bff277-b5e6-4b61-b964-2a615ff1cf94\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4m5m5" Oct 01 15:46:00 crc kubenswrapper[4869]: I1001 15:46:00.690279 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/49bff277-b5e6-4b61-b964-2a615ff1cf94-ceph\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-4m5m5\" (UID: \"49bff277-b5e6-4b61-b964-2a615ff1cf94\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4m5m5" Oct 01 15:46:00 crc kubenswrapper[4869]: I1001 15:46:00.692684 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/49bff277-b5e6-4b61-b964-2a615ff1cf94-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-4m5m5\" (UID: \"49bff277-b5e6-4b61-b964-2a615ff1cf94\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4m5m5" Oct 01 15:46:00 crc kubenswrapper[4869]: I1001 15:46:00.692721 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/49bff277-b5e6-4b61-b964-2a615ff1cf94-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-4m5m5\" (UID: \"49bff277-b5e6-4b61-b964-2a615ff1cf94\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4m5m5" Oct 01 15:46:00 crc kubenswrapper[4869]: I1001 15:46:00.707865 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6vcpm\" (UniqueName: \"kubernetes.io/projected/49bff277-b5e6-4b61-b964-2a615ff1cf94-kube-api-access-6vcpm\") pod 
\"libvirt-edpm-deployment-openstack-edpm-ipam-4m5m5\" (UID: \"49bff277-b5e6-4b61-b964-2a615ff1cf94\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4m5m5" Oct 01 15:46:00 crc kubenswrapper[4869]: I1001 15:46:00.804044 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4m5m5" Oct 01 15:46:01 crc kubenswrapper[4869]: I1001 15:46:01.443407 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4m5m5"] Oct 01 15:46:02 crc kubenswrapper[4869]: I1001 15:46:02.394726 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4m5m5" event={"ID":"49bff277-b5e6-4b61-b964-2a615ff1cf94","Type":"ContainerStarted","Data":"fcfd7f1880eeca543d7163a38f2d3f9eec6ba35e2f3ea6c388bf1fa5bd16c207"} Oct 01 15:46:02 crc kubenswrapper[4869]: I1001 15:46:02.395145 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4m5m5" event={"ID":"49bff277-b5e6-4b61-b964-2a615ff1cf94","Type":"ContainerStarted","Data":"88d2c4dc3e01bf386918bfbe292619fa34f5527532362b0c57f39ddd726d8c8a"} Oct 01 15:46:02 crc kubenswrapper[4869]: I1001 15:46:02.422768 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4m5m5" podStartSLOduration=1.877621669 podStartE2EDuration="2.422737307s" podCreationTimestamp="2025-10-01 15:46:00 +0000 UTC" firstStartedPulling="2025-10-01 15:46:01.453431638 +0000 UTC m=+2470.600274764" lastFinishedPulling="2025-10-01 15:46:01.998547246 +0000 UTC m=+2471.145390402" observedRunningTime="2025-10-01 15:46:02.416732981 +0000 UTC m=+2471.563576167" watchObservedRunningTime="2025-10-01 15:46:02.422737307 +0000 UTC m=+2471.569580463" Oct 01 15:47:43 crc kubenswrapper[4869]: I1001 15:47:43.354526 4869 patch_prober.go:28] interesting pod/machine-config-daemon-c86m8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 15:47:43 crc kubenswrapper[4869]: I1001 15:47:43.355225 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 15:48:03 crc kubenswrapper[4869]: I1001 15:48:03.024522 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-hv6mg"] Oct 01 15:48:03 crc kubenswrapper[4869]: I1001 15:48:03.067634 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-hv6mg"] Oct 01 15:48:03 crc kubenswrapper[4869]: I1001 15:48:03.067757 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-hv6mg" Oct 01 15:48:03 crc kubenswrapper[4869]: I1001 15:48:03.088647 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a0692fb9-73dc-42bd-bc5a-63023b230217-utilities\") pod \"community-operators-hv6mg\" (UID: \"a0692fb9-73dc-42bd-bc5a-63023b230217\") " pod="openshift-marketplace/community-operators-hv6mg" Oct 01 15:48:03 crc kubenswrapper[4869]: I1001 15:48:03.088985 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a0692fb9-73dc-42bd-bc5a-63023b230217-catalog-content\") pod \"community-operators-hv6mg\" (UID: \"a0692fb9-73dc-42bd-bc5a-63023b230217\") " pod="openshift-marketplace/community-operators-hv6mg" Oct 01 15:48:03 crc kubenswrapper[4869]: I1001 15:48:03.089053 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bchgr\" (UniqueName: \"kubernetes.io/projected/a0692fb9-73dc-42bd-bc5a-63023b230217-kube-api-access-bchgr\") pod \"community-operators-hv6mg\" (UID: \"a0692fb9-73dc-42bd-bc5a-63023b230217\") " pod="openshift-marketplace/community-operators-hv6mg" Oct 01 15:48:03 crc kubenswrapper[4869]: I1001 15:48:03.191222 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a0692fb9-73dc-42bd-bc5a-63023b230217-utilities\") pod \"community-operators-hv6mg\" (UID: \"a0692fb9-73dc-42bd-bc5a-63023b230217\") " pod="openshift-marketplace/community-operators-hv6mg" Oct 01 15:48:03 crc kubenswrapper[4869]: I1001 15:48:03.191317 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a0692fb9-73dc-42bd-bc5a-63023b230217-catalog-content\") pod \"community-operators-hv6mg\" (UID: \"a0692fb9-73dc-42bd-bc5a-63023b230217\") " pod="openshift-marketplace/community-operators-hv6mg" Oct 01 15:48:03 crc kubenswrapper[4869]: I1001 15:48:03.191398 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bchgr\" (UniqueName: \"kubernetes.io/projected/a0692fb9-73dc-42bd-bc5a-63023b230217-kube-api-access-bchgr\") pod \"community-operators-hv6mg\" (UID: \"a0692fb9-73dc-42bd-bc5a-63023b230217\") " pod="openshift-marketplace/community-operators-hv6mg" Oct 01 15:48:03 crc kubenswrapper[4869]: I1001 15:48:03.191842 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a0692fb9-73dc-42bd-bc5a-63023b230217-catalog-content\") pod \"community-operators-hv6mg\" (UID: \"a0692fb9-73dc-42bd-bc5a-63023b230217\") " pod="openshift-marketplace/community-operators-hv6mg" Oct 01 15:48:03 crc kubenswrapper[4869]: I1001 15:48:03.191856 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a0692fb9-73dc-42bd-bc5a-63023b230217-utilities\") pod \"community-operators-hv6mg\" (UID: \"a0692fb9-73dc-42bd-bc5a-63023b230217\") " pod="openshift-marketplace/community-operators-hv6mg" Oct 01 15:48:03 crc kubenswrapper[4869]: I1001 15:48:03.212008 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bchgr\" (UniqueName: \"kubernetes.io/projected/a0692fb9-73dc-42bd-bc5a-63023b230217-kube-api-access-bchgr\") pod 
\"community-operators-hv6mg\" (UID: \"a0692fb9-73dc-42bd-bc5a-63023b230217\") " pod="openshift-marketplace/community-operators-hv6mg" Oct 01 15:48:03 crc kubenswrapper[4869]: I1001 15:48:03.392778 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-hv6mg" Oct 01 15:48:03 crc kubenswrapper[4869]: I1001 15:48:03.882407 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-hv6mg"] Oct 01 15:48:04 crc kubenswrapper[4869]: I1001 15:48:04.668871 4869 generic.go:334] "Generic (PLEG): container finished" podID="a0692fb9-73dc-42bd-bc5a-63023b230217" containerID="5ec947588ad8376d2cfce092d29d01d329991634cdead61978677aa76f7b2ed8" exitCode=0 Oct 01 15:48:04 crc kubenswrapper[4869]: I1001 15:48:04.669408 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hv6mg" event={"ID":"a0692fb9-73dc-42bd-bc5a-63023b230217","Type":"ContainerDied","Data":"5ec947588ad8376d2cfce092d29d01d329991634cdead61978677aa76f7b2ed8"} Oct 01 15:48:04 crc kubenswrapper[4869]: I1001 15:48:04.669470 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hv6mg" event={"ID":"a0692fb9-73dc-42bd-bc5a-63023b230217","Type":"ContainerStarted","Data":"a568a47e2080551b8a5292292b1e4656b823f346ab4e1b39e8244aa83ce91abf"} Oct 01 15:48:04 crc kubenswrapper[4869]: I1001 15:48:04.671818 4869 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 01 15:48:05 crc kubenswrapper[4869]: I1001 15:48:05.686032 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hv6mg" event={"ID":"a0692fb9-73dc-42bd-bc5a-63023b230217","Type":"ContainerStarted","Data":"5bc592911a9a2083e4d7aa232af65688dcf44a704523aa957405fc4eb9d73ae4"} Oct 01 15:48:06 crc kubenswrapper[4869]: I1001 15:48:06.700988 4869 generic.go:334] "Generic (PLEG): container finished" podID="a0692fb9-73dc-42bd-bc5a-63023b230217" containerID="5bc592911a9a2083e4d7aa232af65688dcf44a704523aa957405fc4eb9d73ae4" exitCode=0 Oct 01 15:48:06 crc kubenswrapper[4869]: I1001 15:48:06.701057 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hv6mg" event={"ID":"a0692fb9-73dc-42bd-bc5a-63023b230217","Type":"ContainerDied","Data":"5bc592911a9a2083e4d7aa232af65688dcf44a704523aa957405fc4eb9d73ae4"} Oct 01 15:48:08 crc kubenswrapper[4869]: I1001 15:48:08.734650 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hv6mg" event={"ID":"a0692fb9-73dc-42bd-bc5a-63023b230217","Type":"ContainerStarted","Data":"03332bfc948304e531879dc2d8e3e2ae118b46beb164ac46ad6a0227a44c172e"} Oct 01 15:48:08 crc kubenswrapper[4869]: I1001 15:48:08.757296 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-hv6mg" podStartSLOduration=2.866748801 podStartE2EDuration="5.757249413s" podCreationTimestamp="2025-10-01 15:48:03 +0000 UTC" firstStartedPulling="2025-10-01 15:48:04.671601387 +0000 UTC m=+2593.818444493" lastFinishedPulling="2025-10-01 15:48:07.562101989 +0000 UTC m=+2596.708945105" observedRunningTime="2025-10-01 15:48:08.749561249 +0000 UTC m=+2597.896404375" watchObservedRunningTime="2025-10-01 15:48:08.757249413 +0000 UTC m=+2597.904092569" Oct 01 15:48:13 crc kubenswrapper[4869]: I1001 15:48:13.354781 4869 patch_prober.go:28] interesting 
pod/machine-config-daemon-c86m8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 15:48:13 crc kubenswrapper[4869]: I1001 15:48:13.355350 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 15:48:13 crc kubenswrapper[4869]: I1001 15:48:13.393926 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-hv6mg" Oct 01 15:48:13 crc kubenswrapper[4869]: I1001 15:48:13.393992 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-hv6mg" Oct 01 15:48:13 crc kubenswrapper[4869]: I1001 15:48:13.444861 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-hv6mg" Oct 01 15:48:13 crc kubenswrapper[4869]: I1001 15:48:13.873470 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-hv6mg" Oct 01 15:48:13 crc kubenswrapper[4869]: I1001 15:48:13.955118 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-hv6mg"] Oct 01 15:48:15 crc kubenswrapper[4869]: I1001 15:48:15.815285 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-hv6mg" podUID="a0692fb9-73dc-42bd-bc5a-63023b230217" containerName="registry-server" containerID="cri-o://03332bfc948304e531879dc2d8e3e2ae118b46beb164ac46ad6a0227a44c172e" gracePeriod=2 Oct 01 15:48:16 crc kubenswrapper[4869]: I1001 15:48:16.325127 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-hv6mg" Oct 01 15:48:16 crc kubenswrapper[4869]: I1001 15:48:16.481629 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a0692fb9-73dc-42bd-bc5a-63023b230217-utilities\") pod \"a0692fb9-73dc-42bd-bc5a-63023b230217\" (UID: \"a0692fb9-73dc-42bd-bc5a-63023b230217\") " Oct 01 15:48:16 crc kubenswrapper[4869]: I1001 15:48:16.481824 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bchgr\" (UniqueName: \"kubernetes.io/projected/a0692fb9-73dc-42bd-bc5a-63023b230217-kube-api-access-bchgr\") pod \"a0692fb9-73dc-42bd-bc5a-63023b230217\" (UID: \"a0692fb9-73dc-42bd-bc5a-63023b230217\") " Oct 01 15:48:16 crc kubenswrapper[4869]: I1001 15:48:16.481929 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a0692fb9-73dc-42bd-bc5a-63023b230217-catalog-content\") pod \"a0692fb9-73dc-42bd-bc5a-63023b230217\" (UID: \"a0692fb9-73dc-42bd-bc5a-63023b230217\") " Oct 01 15:48:16 crc kubenswrapper[4869]: I1001 15:48:16.483346 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a0692fb9-73dc-42bd-bc5a-63023b230217-utilities" (OuterVolumeSpecName: "utilities") pod "a0692fb9-73dc-42bd-bc5a-63023b230217" (UID: "a0692fb9-73dc-42bd-bc5a-63023b230217"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 15:48:16 crc kubenswrapper[4869]: I1001 15:48:16.492342 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0692fb9-73dc-42bd-bc5a-63023b230217-kube-api-access-bchgr" (OuterVolumeSpecName: "kube-api-access-bchgr") pod "a0692fb9-73dc-42bd-bc5a-63023b230217" (UID: "a0692fb9-73dc-42bd-bc5a-63023b230217"). InnerVolumeSpecName "kube-api-access-bchgr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:48:16 crc kubenswrapper[4869]: I1001 15:48:16.546781 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a0692fb9-73dc-42bd-bc5a-63023b230217-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a0692fb9-73dc-42bd-bc5a-63023b230217" (UID: "a0692fb9-73dc-42bd-bc5a-63023b230217"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 15:48:16 crc kubenswrapper[4869]: I1001 15:48:16.584423 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bchgr\" (UniqueName: \"kubernetes.io/projected/a0692fb9-73dc-42bd-bc5a-63023b230217-kube-api-access-bchgr\") on node \"crc\" DevicePath \"\"" Oct 01 15:48:16 crc kubenswrapper[4869]: I1001 15:48:16.584474 4869 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a0692fb9-73dc-42bd-bc5a-63023b230217-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 15:48:16 crc kubenswrapper[4869]: I1001 15:48:16.584494 4869 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a0692fb9-73dc-42bd-bc5a-63023b230217-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 15:48:16 crc kubenswrapper[4869]: I1001 15:48:16.827029 4869 generic.go:334] "Generic (PLEG): container finished" podID="a0692fb9-73dc-42bd-bc5a-63023b230217" containerID="03332bfc948304e531879dc2d8e3e2ae118b46beb164ac46ad6a0227a44c172e" exitCode=0 Oct 01 15:48:16 crc kubenswrapper[4869]: I1001 15:48:16.827082 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hv6mg" event={"ID":"a0692fb9-73dc-42bd-bc5a-63023b230217","Type":"ContainerDied","Data":"03332bfc948304e531879dc2d8e3e2ae118b46beb164ac46ad6a0227a44c172e"} Oct 01 15:48:16 crc kubenswrapper[4869]: I1001 15:48:16.827128 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hv6mg" event={"ID":"a0692fb9-73dc-42bd-bc5a-63023b230217","Type":"ContainerDied","Data":"a568a47e2080551b8a5292292b1e4656b823f346ab4e1b39e8244aa83ce91abf"} Oct 01 15:48:16 crc kubenswrapper[4869]: I1001 15:48:16.827137 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-hv6mg" Oct 01 15:48:16 crc kubenswrapper[4869]: I1001 15:48:16.827152 4869 scope.go:117] "RemoveContainer" containerID="03332bfc948304e531879dc2d8e3e2ae118b46beb164ac46ad6a0227a44c172e" Oct 01 15:48:16 crc kubenswrapper[4869]: I1001 15:48:16.855724 4869 scope.go:117] "RemoveContainer" containerID="5bc592911a9a2083e4d7aa232af65688dcf44a704523aa957405fc4eb9d73ae4" Oct 01 15:48:16 crc kubenswrapper[4869]: I1001 15:48:16.896650 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-hv6mg"] Oct 01 15:48:16 crc kubenswrapper[4869]: I1001 15:48:16.897151 4869 scope.go:117] "RemoveContainer" containerID="5ec947588ad8376d2cfce092d29d01d329991634cdead61978677aa76f7b2ed8" Oct 01 15:48:16 crc kubenswrapper[4869]: I1001 15:48:16.906078 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-hv6mg"] Oct 01 15:48:16 crc kubenswrapper[4869]: I1001 15:48:16.968744 4869 scope.go:117] "RemoveContainer" containerID="03332bfc948304e531879dc2d8e3e2ae118b46beb164ac46ad6a0227a44c172e" Oct 01 15:48:16 crc kubenswrapper[4869]: E1001 15:48:16.969251 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"03332bfc948304e531879dc2d8e3e2ae118b46beb164ac46ad6a0227a44c172e\": container with ID starting with 03332bfc948304e531879dc2d8e3e2ae118b46beb164ac46ad6a0227a44c172e not found: ID does not exist" containerID="03332bfc948304e531879dc2d8e3e2ae118b46beb164ac46ad6a0227a44c172e" Oct 01 15:48:16 crc kubenswrapper[4869]: I1001 15:48:16.969379 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"03332bfc948304e531879dc2d8e3e2ae118b46beb164ac46ad6a0227a44c172e"} err="failed to get container status \"03332bfc948304e531879dc2d8e3e2ae118b46beb164ac46ad6a0227a44c172e\": rpc error: code = NotFound desc = could not find container \"03332bfc948304e531879dc2d8e3e2ae118b46beb164ac46ad6a0227a44c172e\": container with ID starting with 03332bfc948304e531879dc2d8e3e2ae118b46beb164ac46ad6a0227a44c172e not found: ID does not exist" Oct 01 15:48:16 crc kubenswrapper[4869]: I1001 15:48:16.969415 4869 scope.go:117] "RemoveContainer" containerID="5bc592911a9a2083e4d7aa232af65688dcf44a704523aa957405fc4eb9d73ae4" Oct 01 15:48:16 crc kubenswrapper[4869]: E1001 15:48:16.969944 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5bc592911a9a2083e4d7aa232af65688dcf44a704523aa957405fc4eb9d73ae4\": container with ID starting with 5bc592911a9a2083e4d7aa232af65688dcf44a704523aa957405fc4eb9d73ae4 not found: ID does not exist" containerID="5bc592911a9a2083e4d7aa232af65688dcf44a704523aa957405fc4eb9d73ae4" Oct 01 15:48:16 crc kubenswrapper[4869]: I1001 15:48:16.970009 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5bc592911a9a2083e4d7aa232af65688dcf44a704523aa957405fc4eb9d73ae4"} err="failed to get container status \"5bc592911a9a2083e4d7aa232af65688dcf44a704523aa957405fc4eb9d73ae4\": rpc error: code = NotFound desc = could not find container \"5bc592911a9a2083e4d7aa232af65688dcf44a704523aa957405fc4eb9d73ae4\": container with ID starting with 5bc592911a9a2083e4d7aa232af65688dcf44a704523aa957405fc4eb9d73ae4 not found: ID does not exist" Oct 01 15:48:16 crc kubenswrapper[4869]: I1001 15:48:16.970054 4869 scope.go:117] "RemoveContainer" 
containerID="5ec947588ad8376d2cfce092d29d01d329991634cdead61978677aa76f7b2ed8" Oct 01 15:48:16 crc kubenswrapper[4869]: E1001 15:48:16.970528 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5ec947588ad8376d2cfce092d29d01d329991634cdead61978677aa76f7b2ed8\": container with ID starting with 5ec947588ad8376d2cfce092d29d01d329991634cdead61978677aa76f7b2ed8 not found: ID does not exist" containerID="5ec947588ad8376d2cfce092d29d01d329991634cdead61978677aa76f7b2ed8" Oct 01 15:48:16 crc kubenswrapper[4869]: I1001 15:48:16.970563 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5ec947588ad8376d2cfce092d29d01d329991634cdead61978677aa76f7b2ed8"} err="failed to get container status \"5ec947588ad8376d2cfce092d29d01d329991634cdead61978677aa76f7b2ed8\": rpc error: code = NotFound desc = could not find container \"5ec947588ad8376d2cfce092d29d01d329991634cdead61978677aa76f7b2ed8\": container with ID starting with 5ec947588ad8376d2cfce092d29d01d329991634cdead61978677aa76f7b2ed8 not found: ID does not exist" Oct 01 15:48:17 crc kubenswrapper[4869]: I1001 15:48:17.616093 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0692fb9-73dc-42bd-bc5a-63023b230217" path="/var/lib/kubelet/pods/a0692fb9-73dc-42bd-bc5a-63023b230217/volumes" Oct 01 15:48:40 crc kubenswrapper[4869]: I1001 15:48:40.961977 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-6w7sf"] Oct 01 15:48:40 crc kubenswrapper[4869]: E1001 15:48:40.962889 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a0692fb9-73dc-42bd-bc5a-63023b230217" containerName="extract-utilities" Oct 01 15:48:40 crc kubenswrapper[4869]: I1001 15:48:40.962905 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="a0692fb9-73dc-42bd-bc5a-63023b230217" containerName="extract-utilities" Oct 01 15:48:40 crc kubenswrapper[4869]: E1001 15:48:40.962934 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a0692fb9-73dc-42bd-bc5a-63023b230217" containerName="extract-content" Oct 01 15:48:40 crc kubenswrapper[4869]: I1001 15:48:40.962942 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="a0692fb9-73dc-42bd-bc5a-63023b230217" containerName="extract-content" Oct 01 15:48:40 crc kubenswrapper[4869]: E1001 15:48:40.962964 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a0692fb9-73dc-42bd-bc5a-63023b230217" containerName="registry-server" Oct 01 15:48:40 crc kubenswrapper[4869]: I1001 15:48:40.962972 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="a0692fb9-73dc-42bd-bc5a-63023b230217" containerName="registry-server" Oct 01 15:48:40 crc kubenswrapper[4869]: I1001 15:48:40.963212 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="a0692fb9-73dc-42bd-bc5a-63023b230217" containerName="registry-server" Oct 01 15:48:40 crc kubenswrapper[4869]: I1001 15:48:40.965669 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6w7sf" Oct 01 15:48:41 crc kubenswrapper[4869]: I1001 15:48:41.028397 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-6w7sf"] Oct 01 15:48:41 crc kubenswrapper[4869]: I1001 15:48:41.029665 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6c691a39-81be-4ae3-a699-31a79fc811ac-catalog-content\") pod \"redhat-marketplace-6w7sf\" (UID: \"6c691a39-81be-4ae3-a699-31a79fc811ac\") " pod="openshift-marketplace/redhat-marketplace-6w7sf" Oct 01 15:48:41 crc kubenswrapper[4869]: I1001 15:48:41.029750 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v6qnc\" (UniqueName: \"kubernetes.io/projected/6c691a39-81be-4ae3-a699-31a79fc811ac-kube-api-access-v6qnc\") pod \"redhat-marketplace-6w7sf\" (UID: \"6c691a39-81be-4ae3-a699-31a79fc811ac\") " pod="openshift-marketplace/redhat-marketplace-6w7sf" Oct 01 15:48:41 crc kubenswrapper[4869]: I1001 15:48:41.029813 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6c691a39-81be-4ae3-a699-31a79fc811ac-utilities\") pod \"redhat-marketplace-6w7sf\" (UID: \"6c691a39-81be-4ae3-a699-31a79fc811ac\") " pod="openshift-marketplace/redhat-marketplace-6w7sf" Oct 01 15:48:41 crc kubenswrapper[4869]: I1001 15:48:41.131436 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6c691a39-81be-4ae3-a699-31a79fc811ac-catalog-content\") pod \"redhat-marketplace-6w7sf\" (UID: \"6c691a39-81be-4ae3-a699-31a79fc811ac\") " pod="openshift-marketplace/redhat-marketplace-6w7sf" Oct 01 15:48:41 crc kubenswrapper[4869]: I1001 15:48:41.131816 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v6qnc\" (UniqueName: \"kubernetes.io/projected/6c691a39-81be-4ae3-a699-31a79fc811ac-kube-api-access-v6qnc\") pod \"redhat-marketplace-6w7sf\" (UID: \"6c691a39-81be-4ae3-a699-31a79fc811ac\") " pod="openshift-marketplace/redhat-marketplace-6w7sf" Oct 01 15:48:41 crc kubenswrapper[4869]: I1001 15:48:41.131891 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6c691a39-81be-4ae3-a699-31a79fc811ac-utilities\") pod \"redhat-marketplace-6w7sf\" (UID: \"6c691a39-81be-4ae3-a699-31a79fc811ac\") " pod="openshift-marketplace/redhat-marketplace-6w7sf" Oct 01 15:48:41 crc kubenswrapper[4869]: I1001 15:48:41.132105 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6c691a39-81be-4ae3-a699-31a79fc811ac-catalog-content\") pod \"redhat-marketplace-6w7sf\" (UID: \"6c691a39-81be-4ae3-a699-31a79fc811ac\") " pod="openshift-marketplace/redhat-marketplace-6w7sf" Oct 01 15:48:41 crc kubenswrapper[4869]: I1001 15:48:41.132408 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6c691a39-81be-4ae3-a699-31a79fc811ac-utilities\") pod \"redhat-marketplace-6w7sf\" (UID: \"6c691a39-81be-4ae3-a699-31a79fc811ac\") " pod="openshift-marketplace/redhat-marketplace-6w7sf" Oct 01 15:48:41 crc kubenswrapper[4869]: I1001 15:48:41.161210 4869 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-v6qnc\" (UniqueName: \"kubernetes.io/projected/6c691a39-81be-4ae3-a699-31a79fc811ac-kube-api-access-v6qnc\") pod \"redhat-marketplace-6w7sf\" (UID: \"6c691a39-81be-4ae3-a699-31a79fc811ac\") " pod="openshift-marketplace/redhat-marketplace-6w7sf" Oct 01 15:48:41 crc kubenswrapper[4869]: I1001 15:48:41.321874 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6w7sf" Oct 01 15:48:41 crc kubenswrapper[4869]: I1001 15:48:41.843227 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-6w7sf"] Oct 01 15:48:42 crc kubenswrapper[4869]: I1001 15:48:42.104784 4869 generic.go:334] "Generic (PLEG): container finished" podID="6c691a39-81be-4ae3-a699-31a79fc811ac" containerID="291e793fec80504651b4563cd9f63595eb95301eea9a65d5545990f9a0aafd33" exitCode=0 Oct 01 15:48:42 crc kubenswrapper[4869]: I1001 15:48:42.104851 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6w7sf" event={"ID":"6c691a39-81be-4ae3-a699-31a79fc811ac","Type":"ContainerDied","Data":"291e793fec80504651b4563cd9f63595eb95301eea9a65d5545990f9a0aafd33"} Oct 01 15:48:42 crc kubenswrapper[4869]: I1001 15:48:42.105143 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6w7sf" event={"ID":"6c691a39-81be-4ae3-a699-31a79fc811ac","Type":"ContainerStarted","Data":"62a2ebc27c796c93a25cfb184953af7907fe23d43bf258a1a318807253d6abfd"} Oct 01 15:48:43 crc kubenswrapper[4869]: I1001 15:48:43.117607 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6w7sf" event={"ID":"6c691a39-81be-4ae3-a699-31a79fc811ac","Type":"ContainerStarted","Data":"03e5b9001abf6b69481ac641515873a3f96a8375edbac55fd546ff36a422e86a"} Oct 01 15:48:43 crc kubenswrapper[4869]: I1001 15:48:43.354435 4869 patch_prober.go:28] interesting pod/machine-config-daemon-c86m8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 15:48:43 crc kubenswrapper[4869]: I1001 15:48:43.354485 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 15:48:43 crc kubenswrapper[4869]: I1001 15:48:43.354522 4869 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" Oct 01 15:48:43 crc kubenswrapper[4869]: I1001 15:48:43.355147 4869 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"25093fbe92e6ae652a3af4c4b9e03e2282557f8fc51b27a9e7955934416a4f59"} pod="openshift-machine-config-operator/machine-config-daemon-c86m8" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 01 15:48:43 crc kubenswrapper[4869]: I1001 15:48:43.355199 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" 
containerID="cri-o://25093fbe92e6ae652a3af4c4b9e03e2282557f8fc51b27a9e7955934416a4f59" gracePeriod=600 Oct 01 15:48:44 crc kubenswrapper[4869]: I1001 15:48:44.128977 4869 generic.go:334] "Generic (PLEG): container finished" podID="6c691a39-81be-4ae3-a699-31a79fc811ac" containerID="03e5b9001abf6b69481ac641515873a3f96a8375edbac55fd546ff36a422e86a" exitCode=0 Oct 01 15:48:44 crc kubenswrapper[4869]: I1001 15:48:44.129069 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6w7sf" event={"ID":"6c691a39-81be-4ae3-a699-31a79fc811ac","Type":"ContainerDied","Data":"03e5b9001abf6b69481ac641515873a3f96a8375edbac55fd546ff36a422e86a"} Oct 01 15:48:44 crc kubenswrapper[4869]: I1001 15:48:44.132842 4869 generic.go:334] "Generic (PLEG): container finished" podID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerID="25093fbe92e6ae652a3af4c4b9e03e2282557f8fc51b27a9e7955934416a4f59" exitCode=0 Oct 01 15:48:44 crc kubenswrapper[4869]: I1001 15:48:44.132882 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" event={"ID":"a4b64f7f-0b03-4f47-965b-9fde048b735c","Type":"ContainerDied","Data":"25093fbe92e6ae652a3af4c4b9e03e2282557f8fc51b27a9e7955934416a4f59"} Oct 01 15:48:44 crc kubenswrapper[4869]: I1001 15:48:44.132912 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" event={"ID":"a4b64f7f-0b03-4f47-965b-9fde048b735c","Type":"ContainerStarted","Data":"ea87da209cc47118cc41bdb3107264d5dd2e07bc832e620553f0ac1be9456693"} Oct 01 15:48:44 crc kubenswrapper[4869]: I1001 15:48:44.132931 4869 scope.go:117] "RemoveContainer" containerID="850fa9657d55ab61ed48a042dba1eb165240f62a294e8f87d32c9a3206247a54" Oct 01 15:48:46 crc kubenswrapper[4869]: I1001 15:48:46.168513 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6w7sf" event={"ID":"6c691a39-81be-4ae3-a699-31a79fc811ac","Type":"ContainerStarted","Data":"59e400200c7048958213c0a93d512fcdf3642feea5692bd47969b3782092a9e9"} Oct 01 15:48:47 crc kubenswrapper[4869]: I1001 15:48:47.204029 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-6w7sf" podStartSLOduration=3.469728838 podStartE2EDuration="7.204006884s" podCreationTimestamp="2025-10-01 15:48:40 +0000 UTC" firstStartedPulling="2025-10-01 15:48:42.106802683 +0000 UTC m=+2631.253645839" lastFinishedPulling="2025-10-01 15:48:45.841080779 +0000 UTC m=+2634.987923885" observedRunningTime="2025-10-01 15:48:47.197690035 +0000 UTC m=+2636.344533181" watchObservedRunningTime="2025-10-01 15:48:47.204006884 +0000 UTC m=+2636.350850020" Oct 01 15:48:51 crc kubenswrapper[4869]: I1001 15:48:51.323168 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-6w7sf" Oct 01 15:48:51 crc kubenswrapper[4869]: I1001 15:48:51.325028 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-6w7sf" Oct 01 15:48:51 crc kubenswrapper[4869]: I1001 15:48:51.406056 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-6w7sf" Oct 01 15:48:52 crc kubenswrapper[4869]: I1001 15:48:52.307172 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-6w7sf" Oct 01 15:48:52 crc kubenswrapper[4869]: I1001 
15:48:52.376765 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-6w7sf"] Oct 01 15:48:54 crc kubenswrapper[4869]: I1001 15:48:54.263548 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-6w7sf" podUID="6c691a39-81be-4ae3-a699-31a79fc811ac" containerName="registry-server" containerID="cri-o://59e400200c7048958213c0a93d512fcdf3642feea5692bd47969b3782092a9e9" gracePeriod=2 Oct 01 15:48:54 crc kubenswrapper[4869]: I1001 15:48:54.967933 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6w7sf" Oct 01 15:48:55 crc kubenswrapper[4869]: I1001 15:48:55.040013 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6c691a39-81be-4ae3-a699-31a79fc811ac-catalog-content\") pod \"6c691a39-81be-4ae3-a699-31a79fc811ac\" (UID: \"6c691a39-81be-4ae3-a699-31a79fc811ac\") " Oct 01 15:48:55 crc kubenswrapper[4869]: I1001 15:48:55.040156 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6c691a39-81be-4ae3-a699-31a79fc811ac-utilities\") pod \"6c691a39-81be-4ae3-a699-31a79fc811ac\" (UID: \"6c691a39-81be-4ae3-a699-31a79fc811ac\") " Oct 01 15:48:55 crc kubenswrapper[4869]: I1001 15:48:55.040524 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v6qnc\" (UniqueName: \"kubernetes.io/projected/6c691a39-81be-4ae3-a699-31a79fc811ac-kube-api-access-v6qnc\") pod \"6c691a39-81be-4ae3-a699-31a79fc811ac\" (UID: \"6c691a39-81be-4ae3-a699-31a79fc811ac\") " Oct 01 15:48:55 crc kubenswrapper[4869]: I1001 15:48:55.041461 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6c691a39-81be-4ae3-a699-31a79fc811ac-utilities" (OuterVolumeSpecName: "utilities") pod "6c691a39-81be-4ae3-a699-31a79fc811ac" (UID: "6c691a39-81be-4ae3-a699-31a79fc811ac"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 15:48:55 crc kubenswrapper[4869]: I1001 15:48:55.048321 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6c691a39-81be-4ae3-a699-31a79fc811ac-kube-api-access-v6qnc" (OuterVolumeSpecName: "kube-api-access-v6qnc") pod "6c691a39-81be-4ae3-a699-31a79fc811ac" (UID: "6c691a39-81be-4ae3-a699-31a79fc811ac"). InnerVolumeSpecName "kube-api-access-v6qnc". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:48:55 crc kubenswrapper[4869]: I1001 15:48:55.064444 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6c691a39-81be-4ae3-a699-31a79fc811ac-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6c691a39-81be-4ae3-a699-31a79fc811ac" (UID: "6c691a39-81be-4ae3-a699-31a79fc811ac"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 15:48:55 crc kubenswrapper[4869]: I1001 15:48:55.143093 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v6qnc\" (UniqueName: \"kubernetes.io/projected/6c691a39-81be-4ae3-a699-31a79fc811ac-kube-api-access-v6qnc\") on node \"crc\" DevicePath \"\"" Oct 01 15:48:55 crc kubenswrapper[4869]: I1001 15:48:55.143436 4869 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6c691a39-81be-4ae3-a699-31a79fc811ac-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 15:48:55 crc kubenswrapper[4869]: I1001 15:48:55.143449 4869 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6c691a39-81be-4ae3-a699-31a79fc811ac-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 15:48:55 crc kubenswrapper[4869]: I1001 15:48:55.275670 4869 generic.go:334] "Generic (PLEG): container finished" podID="6c691a39-81be-4ae3-a699-31a79fc811ac" containerID="59e400200c7048958213c0a93d512fcdf3642feea5692bd47969b3782092a9e9" exitCode=0 Oct 01 15:48:55 crc kubenswrapper[4869]: I1001 15:48:55.275774 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6w7sf" event={"ID":"6c691a39-81be-4ae3-a699-31a79fc811ac","Type":"ContainerDied","Data":"59e400200c7048958213c0a93d512fcdf3642feea5692bd47969b3782092a9e9"} Oct 01 15:48:55 crc kubenswrapper[4869]: I1001 15:48:55.275873 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6w7sf" event={"ID":"6c691a39-81be-4ae3-a699-31a79fc811ac","Type":"ContainerDied","Data":"62a2ebc27c796c93a25cfb184953af7907fe23d43bf258a1a318807253d6abfd"} Oct 01 15:48:55 crc kubenswrapper[4869]: I1001 15:48:55.275932 4869 scope.go:117] "RemoveContainer" containerID="59e400200c7048958213c0a93d512fcdf3642feea5692bd47969b3782092a9e9" Oct 01 15:48:55 crc kubenswrapper[4869]: I1001 15:48:55.280880 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6w7sf" Oct 01 15:48:55 crc kubenswrapper[4869]: I1001 15:48:55.309135 4869 scope.go:117] "RemoveContainer" containerID="03e5b9001abf6b69481ac641515873a3f96a8375edbac55fd546ff36a422e86a" Oct 01 15:48:55 crc kubenswrapper[4869]: I1001 15:48:55.329651 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-6w7sf"] Oct 01 15:48:55 crc kubenswrapper[4869]: I1001 15:48:55.344435 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-6w7sf"] Oct 01 15:48:55 crc kubenswrapper[4869]: I1001 15:48:55.356009 4869 scope.go:117] "RemoveContainer" containerID="291e793fec80504651b4563cd9f63595eb95301eea9a65d5545990f9a0aafd33" Oct 01 15:48:55 crc kubenswrapper[4869]: I1001 15:48:55.384867 4869 scope.go:117] "RemoveContainer" containerID="59e400200c7048958213c0a93d512fcdf3642feea5692bd47969b3782092a9e9" Oct 01 15:48:55 crc kubenswrapper[4869]: E1001 15:48:55.385748 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"59e400200c7048958213c0a93d512fcdf3642feea5692bd47969b3782092a9e9\": container with ID starting with 59e400200c7048958213c0a93d512fcdf3642feea5692bd47969b3782092a9e9 not found: ID does not exist" containerID="59e400200c7048958213c0a93d512fcdf3642feea5692bd47969b3782092a9e9" Oct 01 15:48:55 crc kubenswrapper[4869]: I1001 15:48:55.385999 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"59e400200c7048958213c0a93d512fcdf3642feea5692bd47969b3782092a9e9"} err="failed to get container status \"59e400200c7048958213c0a93d512fcdf3642feea5692bd47969b3782092a9e9\": rpc error: code = NotFound desc = could not find container \"59e400200c7048958213c0a93d512fcdf3642feea5692bd47969b3782092a9e9\": container with ID starting with 59e400200c7048958213c0a93d512fcdf3642feea5692bd47969b3782092a9e9 not found: ID does not exist" Oct 01 15:48:55 crc kubenswrapper[4869]: I1001 15:48:55.386231 4869 scope.go:117] "RemoveContainer" containerID="03e5b9001abf6b69481ac641515873a3f96a8375edbac55fd546ff36a422e86a" Oct 01 15:48:55 crc kubenswrapper[4869]: E1001 15:48:55.386932 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"03e5b9001abf6b69481ac641515873a3f96a8375edbac55fd546ff36a422e86a\": container with ID starting with 03e5b9001abf6b69481ac641515873a3f96a8375edbac55fd546ff36a422e86a not found: ID does not exist" containerID="03e5b9001abf6b69481ac641515873a3f96a8375edbac55fd546ff36a422e86a" Oct 01 15:48:55 crc kubenswrapper[4869]: I1001 15:48:55.386974 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"03e5b9001abf6b69481ac641515873a3f96a8375edbac55fd546ff36a422e86a"} err="failed to get container status \"03e5b9001abf6b69481ac641515873a3f96a8375edbac55fd546ff36a422e86a\": rpc error: code = NotFound desc = could not find container \"03e5b9001abf6b69481ac641515873a3f96a8375edbac55fd546ff36a422e86a\": container with ID starting with 03e5b9001abf6b69481ac641515873a3f96a8375edbac55fd546ff36a422e86a not found: ID does not exist" Oct 01 15:48:55 crc kubenswrapper[4869]: I1001 15:48:55.386997 4869 scope.go:117] "RemoveContainer" containerID="291e793fec80504651b4563cd9f63595eb95301eea9a65d5545990f9a0aafd33" Oct 01 15:48:55 crc kubenswrapper[4869]: E1001 15:48:55.387587 4869 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"291e793fec80504651b4563cd9f63595eb95301eea9a65d5545990f9a0aafd33\": container with ID starting with 291e793fec80504651b4563cd9f63595eb95301eea9a65d5545990f9a0aafd33 not found: ID does not exist" containerID="291e793fec80504651b4563cd9f63595eb95301eea9a65d5545990f9a0aafd33" Oct 01 15:48:55 crc kubenswrapper[4869]: I1001 15:48:55.388021 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"291e793fec80504651b4563cd9f63595eb95301eea9a65d5545990f9a0aafd33"} err="failed to get container status \"291e793fec80504651b4563cd9f63595eb95301eea9a65d5545990f9a0aafd33\": rpc error: code = NotFound desc = could not find container \"291e793fec80504651b4563cd9f63595eb95301eea9a65d5545990f9a0aafd33\": container with ID starting with 291e793fec80504651b4563cd9f63595eb95301eea9a65d5545990f9a0aafd33 not found: ID does not exist" Oct 01 15:48:55 crc kubenswrapper[4869]: I1001 15:48:55.607412 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6c691a39-81be-4ae3-a699-31a79fc811ac" path="/var/lib/kubelet/pods/6c691a39-81be-4ae3-a699-31a79fc811ac/volumes" Oct 01 15:49:25 crc kubenswrapper[4869]: I1001 15:49:25.119057 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-j77xs"] Oct 01 15:49:25 crc kubenswrapper[4869]: E1001 15:49:25.120011 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c691a39-81be-4ae3-a699-31a79fc811ac" containerName="extract-utilities" Oct 01 15:49:25 crc kubenswrapper[4869]: I1001 15:49:25.120027 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c691a39-81be-4ae3-a699-31a79fc811ac" containerName="extract-utilities" Oct 01 15:49:25 crc kubenswrapper[4869]: E1001 15:49:25.120053 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c691a39-81be-4ae3-a699-31a79fc811ac" containerName="extract-content" Oct 01 15:49:25 crc kubenswrapper[4869]: I1001 15:49:25.120060 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c691a39-81be-4ae3-a699-31a79fc811ac" containerName="extract-content" Oct 01 15:49:25 crc kubenswrapper[4869]: E1001 15:49:25.120069 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c691a39-81be-4ae3-a699-31a79fc811ac" containerName="registry-server" Oct 01 15:49:25 crc kubenswrapper[4869]: I1001 15:49:25.120075 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c691a39-81be-4ae3-a699-31a79fc811ac" containerName="registry-server" Oct 01 15:49:25 crc kubenswrapper[4869]: I1001 15:49:25.120326 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="6c691a39-81be-4ae3-a699-31a79fc811ac" containerName="registry-server" Oct 01 15:49:25 crc kubenswrapper[4869]: I1001 15:49:25.121863 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-j77xs" Oct 01 15:49:25 crc kubenswrapper[4869]: I1001 15:49:25.134207 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-j77xs"] Oct 01 15:49:25 crc kubenswrapper[4869]: I1001 15:49:25.245958 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mfz9b\" (UniqueName: \"kubernetes.io/projected/b278a4f4-391f-4207-930f-1d1f833279b2-kube-api-access-mfz9b\") pod \"certified-operators-j77xs\" (UID: \"b278a4f4-391f-4207-930f-1d1f833279b2\") " pod="openshift-marketplace/certified-operators-j77xs" Oct 01 15:49:25 crc kubenswrapper[4869]: I1001 15:49:25.246432 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b278a4f4-391f-4207-930f-1d1f833279b2-utilities\") pod \"certified-operators-j77xs\" (UID: \"b278a4f4-391f-4207-930f-1d1f833279b2\") " pod="openshift-marketplace/certified-operators-j77xs" Oct 01 15:49:25 crc kubenswrapper[4869]: I1001 15:49:25.246567 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b278a4f4-391f-4207-930f-1d1f833279b2-catalog-content\") pod \"certified-operators-j77xs\" (UID: \"b278a4f4-391f-4207-930f-1d1f833279b2\") " pod="openshift-marketplace/certified-operators-j77xs" Oct 01 15:49:25 crc kubenswrapper[4869]: I1001 15:49:25.348441 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mfz9b\" (UniqueName: \"kubernetes.io/projected/b278a4f4-391f-4207-930f-1d1f833279b2-kube-api-access-mfz9b\") pod \"certified-operators-j77xs\" (UID: \"b278a4f4-391f-4207-930f-1d1f833279b2\") " pod="openshift-marketplace/certified-operators-j77xs" Oct 01 15:49:25 crc kubenswrapper[4869]: I1001 15:49:25.348567 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b278a4f4-391f-4207-930f-1d1f833279b2-utilities\") pod \"certified-operators-j77xs\" (UID: \"b278a4f4-391f-4207-930f-1d1f833279b2\") " pod="openshift-marketplace/certified-operators-j77xs" Oct 01 15:49:25 crc kubenswrapper[4869]: I1001 15:49:25.348601 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b278a4f4-391f-4207-930f-1d1f833279b2-catalog-content\") pod \"certified-operators-j77xs\" (UID: \"b278a4f4-391f-4207-930f-1d1f833279b2\") " pod="openshift-marketplace/certified-operators-j77xs" Oct 01 15:49:25 crc kubenswrapper[4869]: I1001 15:49:25.349069 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b278a4f4-391f-4207-930f-1d1f833279b2-catalog-content\") pod \"certified-operators-j77xs\" (UID: \"b278a4f4-391f-4207-930f-1d1f833279b2\") " pod="openshift-marketplace/certified-operators-j77xs" Oct 01 15:49:25 crc kubenswrapper[4869]: I1001 15:49:25.349310 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b278a4f4-391f-4207-930f-1d1f833279b2-utilities\") pod \"certified-operators-j77xs\" (UID: \"b278a4f4-391f-4207-930f-1d1f833279b2\") " pod="openshift-marketplace/certified-operators-j77xs" Oct 01 15:49:25 crc kubenswrapper[4869]: I1001 15:49:25.375924 4869 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-mfz9b\" (UniqueName: \"kubernetes.io/projected/b278a4f4-391f-4207-930f-1d1f833279b2-kube-api-access-mfz9b\") pod \"certified-operators-j77xs\" (UID: \"b278a4f4-391f-4207-930f-1d1f833279b2\") " pod="openshift-marketplace/certified-operators-j77xs" Oct 01 15:49:25 crc kubenswrapper[4869]: I1001 15:49:25.447839 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-j77xs" Oct 01 15:49:25 crc kubenswrapper[4869]: I1001 15:49:25.963027 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-j77xs"] Oct 01 15:49:26 crc kubenswrapper[4869]: I1001 15:49:26.635414 4869 generic.go:334] "Generic (PLEG): container finished" podID="b278a4f4-391f-4207-930f-1d1f833279b2" containerID="dcff541c96dba12ba0ead29d3b75b548f45575094f1f4f61f28448018972ca7d" exitCode=0 Oct 01 15:49:26 crc kubenswrapper[4869]: I1001 15:49:26.635483 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-j77xs" event={"ID":"b278a4f4-391f-4207-930f-1d1f833279b2","Type":"ContainerDied","Data":"dcff541c96dba12ba0ead29d3b75b548f45575094f1f4f61f28448018972ca7d"} Oct 01 15:49:26 crc kubenswrapper[4869]: I1001 15:49:26.635792 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-j77xs" event={"ID":"b278a4f4-391f-4207-930f-1d1f833279b2","Type":"ContainerStarted","Data":"fcfb75fdad964a8b226486340187840f3ccb36cd099b65d08bb5a1367b570aa8"} Oct 01 15:49:29 crc kubenswrapper[4869]: I1001 15:49:29.667109 4869 generic.go:334] "Generic (PLEG): container finished" podID="b278a4f4-391f-4207-930f-1d1f833279b2" containerID="3d863a20679bb24706bca70c967efd7d0bb10fe19b461751fa5815624c992250" exitCode=0 Oct 01 15:49:29 crc kubenswrapper[4869]: I1001 15:49:29.667209 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-j77xs" event={"ID":"b278a4f4-391f-4207-930f-1d1f833279b2","Type":"ContainerDied","Data":"3d863a20679bb24706bca70c967efd7d0bb10fe19b461751fa5815624c992250"} Oct 01 15:49:32 crc kubenswrapper[4869]: I1001 15:49:32.705461 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-j77xs" event={"ID":"b278a4f4-391f-4207-930f-1d1f833279b2","Type":"ContainerStarted","Data":"b3e5b7f4fd336faf4e8c3ab4170e3a30cc70fde295fc51d405de6d9ea8a3a15d"} Oct 01 15:49:32 crc kubenswrapper[4869]: I1001 15:49:32.736429 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-j77xs" podStartSLOduration=3.254708696 podStartE2EDuration="7.736407007s" podCreationTimestamp="2025-10-01 15:49:25 +0000 UTC" firstStartedPulling="2025-10-01 15:49:26.639150061 +0000 UTC m=+2675.785993207" lastFinishedPulling="2025-10-01 15:49:31.120848402 +0000 UTC m=+2680.267691518" observedRunningTime="2025-10-01 15:49:32.735219977 +0000 UTC m=+2681.882063103" watchObservedRunningTime="2025-10-01 15:49:32.736407007 +0000 UTC m=+2681.883250143" Oct 01 15:49:35 crc kubenswrapper[4869]: I1001 15:49:35.448251 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-j77xs" Oct 01 15:49:35 crc kubenswrapper[4869]: I1001 15:49:35.448787 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-j77xs" Oct 01 15:49:35 crc kubenswrapper[4869]: I1001 15:49:35.506542 4869 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-j77xs" Oct 01 15:49:36 crc kubenswrapper[4869]: I1001 15:49:36.796049 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-j77xs" Oct 01 15:49:36 crc kubenswrapper[4869]: I1001 15:49:36.847522 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-j77xs"] Oct 01 15:49:38 crc kubenswrapper[4869]: I1001 15:49:38.762319 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-j77xs" podUID="b278a4f4-391f-4207-930f-1d1f833279b2" containerName="registry-server" containerID="cri-o://b3e5b7f4fd336faf4e8c3ab4170e3a30cc70fde295fc51d405de6d9ea8a3a15d" gracePeriod=2 Oct 01 15:49:39 crc kubenswrapper[4869]: I1001 15:49:39.774742 4869 generic.go:334] "Generic (PLEG): container finished" podID="b278a4f4-391f-4207-930f-1d1f833279b2" containerID="b3e5b7f4fd336faf4e8c3ab4170e3a30cc70fde295fc51d405de6d9ea8a3a15d" exitCode=0 Oct 01 15:49:39 crc kubenswrapper[4869]: I1001 15:49:39.774943 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-j77xs" event={"ID":"b278a4f4-391f-4207-930f-1d1f833279b2","Type":"ContainerDied","Data":"b3e5b7f4fd336faf4e8c3ab4170e3a30cc70fde295fc51d405de6d9ea8a3a15d"} Oct 01 15:49:39 crc kubenswrapper[4869]: I1001 15:49:39.775107 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-j77xs" event={"ID":"b278a4f4-391f-4207-930f-1d1f833279b2","Type":"ContainerDied","Data":"fcfb75fdad964a8b226486340187840f3ccb36cd099b65d08bb5a1367b570aa8"} Oct 01 15:49:39 crc kubenswrapper[4869]: I1001 15:49:39.775126 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fcfb75fdad964a8b226486340187840f3ccb36cd099b65d08bb5a1367b570aa8" Oct 01 15:49:39 crc kubenswrapper[4869]: I1001 15:49:39.818933 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-j77xs" Oct 01 15:49:39 crc kubenswrapper[4869]: I1001 15:49:39.851282 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b278a4f4-391f-4207-930f-1d1f833279b2-catalog-content\") pod \"b278a4f4-391f-4207-930f-1d1f833279b2\" (UID: \"b278a4f4-391f-4207-930f-1d1f833279b2\") " Oct 01 15:49:39 crc kubenswrapper[4869]: I1001 15:49:39.851362 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mfz9b\" (UniqueName: \"kubernetes.io/projected/b278a4f4-391f-4207-930f-1d1f833279b2-kube-api-access-mfz9b\") pod \"b278a4f4-391f-4207-930f-1d1f833279b2\" (UID: \"b278a4f4-391f-4207-930f-1d1f833279b2\") " Oct 01 15:49:39 crc kubenswrapper[4869]: I1001 15:49:39.851406 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b278a4f4-391f-4207-930f-1d1f833279b2-utilities\") pod \"b278a4f4-391f-4207-930f-1d1f833279b2\" (UID: \"b278a4f4-391f-4207-930f-1d1f833279b2\") " Oct 01 15:49:39 crc kubenswrapper[4869]: I1001 15:49:39.854223 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b278a4f4-391f-4207-930f-1d1f833279b2-utilities" (OuterVolumeSpecName: "utilities") pod "b278a4f4-391f-4207-930f-1d1f833279b2" (UID: "b278a4f4-391f-4207-930f-1d1f833279b2"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 15:49:39 crc kubenswrapper[4869]: I1001 15:49:39.859657 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b278a4f4-391f-4207-930f-1d1f833279b2-kube-api-access-mfz9b" (OuterVolumeSpecName: "kube-api-access-mfz9b") pod "b278a4f4-391f-4207-930f-1d1f833279b2" (UID: "b278a4f4-391f-4207-930f-1d1f833279b2"). InnerVolumeSpecName "kube-api-access-mfz9b". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:49:39 crc kubenswrapper[4869]: I1001 15:49:39.915022 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b278a4f4-391f-4207-930f-1d1f833279b2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b278a4f4-391f-4207-930f-1d1f833279b2" (UID: "b278a4f4-391f-4207-930f-1d1f833279b2"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 15:49:39 crc kubenswrapper[4869]: I1001 15:49:39.952979 4869 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b278a4f4-391f-4207-930f-1d1f833279b2-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 15:49:39 crc kubenswrapper[4869]: I1001 15:49:39.953013 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mfz9b\" (UniqueName: \"kubernetes.io/projected/b278a4f4-391f-4207-930f-1d1f833279b2-kube-api-access-mfz9b\") on node \"crc\" DevicePath \"\"" Oct 01 15:49:39 crc kubenswrapper[4869]: I1001 15:49:39.953024 4869 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b278a4f4-391f-4207-930f-1d1f833279b2-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 15:49:40 crc kubenswrapper[4869]: I1001 15:49:40.784026 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-j77xs" Oct 01 15:49:40 crc kubenswrapper[4869]: I1001 15:49:40.844965 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-j77xs"] Oct 01 15:49:40 crc kubenswrapper[4869]: I1001 15:49:40.854854 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-j77xs"] Oct 01 15:49:41 crc kubenswrapper[4869]: I1001 15:49:41.602569 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b278a4f4-391f-4207-930f-1d1f833279b2" path="/var/lib/kubelet/pods/b278a4f4-391f-4207-930f-1d1f833279b2/volumes" Oct 01 15:50:39 crc kubenswrapper[4869]: I1001 15:50:39.386227 4869 generic.go:334] "Generic (PLEG): container finished" podID="49bff277-b5e6-4b61-b964-2a615ff1cf94" containerID="fcfd7f1880eeca543d7163a38f2d3f9eec6ba35e2f3ea6c388bf1fa5bd16c207" exitCode=0 Oct 01 15:50:39 crc kubenswrapper[4869]: I1001 15:50:39.386315 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4m5m5" event={"ID":"49bff277-b5e6-4b61-b964-2a615ff1cf94","Type":"ContainerDied","Data":"fcfd7f1880eeca543d7163a38f2d3f9eec6ba35e2f3ea6c388bf1fa5bd16c207"} Oct 01 15:50:40 crc kubenswrapper[4869]: I1001 15:50:40.782557 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4m5m5" Oct 01 15:50:40 crc kubenswrapper[4869]: I1001 15:50:40.854640 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6vcpm\" (UniqueName: \"kubernetes.io/projected/49bff277-b5e6-4b61-b964-2a615ff1cf94-kube-api-access-6vcpm\") pod \"49bff277-b5e6-4b61-b964-2a615ff1cf94\" (UID: \"49bff277-b5e6-4b61-b964-2a615ff1cf94\") " Oct 01 15:50:40 crc kubenswrapper[4869]: I1001 15:50:40.854754 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/49bff277-b5e6-4b61-b964-2a615ff1cf94-ceph\") pod \"49bff277-b5e6-4b61-b964-2a615ff1cf94\" (UID: \"49bff277-b5e6-4b61-b964-2a615ff1cf94\") " Oct 01 15:50:40 crc kubenswrapper[4869]: I1001 15:50:40.854864 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/49bff277-b5e6-4b61-b964-2a615ff1cf94-libvirt-secret-0\") pod \"49bff277-b5e6-4b61-b964-2a615ff1cf94\" (UID: \"49bff277-b5e6-4b61-b964-2a615ff1cf94\") " Oct 01 15:50:40 crc kubenswrapper[4869]: I1001 15:50:40.854931 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/49bff277-b5e6-4b61-b964-2a615ff1cf94-ssh-key\") pod \"49bff277-b5e6-4b61-b964-2a615ff1cf94\" (UID: \"49bff277-b5e6-4b61-b964-2a615ff1cf94\") " Oct 01 15:50:40 crc kubenswrapper[4869]: I1001 15:50:40.855024 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/49bff277-b5e6-4b61-b964-2a615ff1cf94-libvirt-combined-ca-bundle\") pod \"49bff277-b5e6-4b61-b964-2a615ff1cf94\" (UID: \"49bff277-b5e6-4b61-b964-2a615ff1cf94\") " Oct 01 15:50:40 crc kubenswrapper[4869]: I1001 15:50:40.855099 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/49bff277-b5e6-4b61-b964-2a615ff1cf94-inventory\") pod \"49bff277-b5e6-4b61-b964-2a615ff1cf94\" (UID: 
\"49bff277-b5e6-4b61-b964-2a615ff1cf94\") " Oct 01 15:50:40 crc kubenswrapper[4869]: I1001 15:50:40.861153 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49bff277-b5e6-4b61-b964-2a615ff1cf94-ceph" (OuterVolumeSpecName: "ceph") pod "49bff277-b5e6-4b61-b964-2a615ff1cf94" (UID: "49bff277-b5e6-4b61-b964-2a615ff1cf94"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:50:40 crc kubenswrapper[4869]: I1001 15:50:40.861213 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49bff277-b5e6-4b61-b964-2a615ff1cf94-kube-api-access-6vcpm" (OuterVolumeSpecName: "kube-api-access-6vcpm") pod "49bff277-b5e6-4b61-b964-2a615ff1cf94" (UID: "49bff277-b5e6-4b61-b964-2a615ff1cf94"). InnerVolumeSpecName "kube-api-access-6vcpm". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:50:40 crc kubenswrapper[4869]: I1001 15:50:40.861845 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49bff277-b5e6-4b61-b964-2a615ff1cf94-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "49bff277-b5e6-4b61-b964-2a615ff1cf94" (UID: "49bff277-b5e6-4b61-b964-2a615ff1cf94"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:50:40 crc kubenswrapper[4869]: I1001 15:50:40.885169 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49bff277-b5e6-4b61-b964-2a615ff1cf94-libvirt-secret-0" (OuterVolumeSpecName: "libvirt-secret-0") pod "49bff277-b5e6-4b61-b964-2a615ff1cf94" (UID: "49bff277-b5e6-4b61-b964-2a615ff1cf94"). InnerVolumeSpecName "libvirt-secret-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:50:40 crc kubenswrapper[4869]: I1001 15:50:40.888157 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49bff277-b5e6-4b61-b964-2a615ff1cf94-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "49bff277-b5e6-4b61-b964-2a615ff1cf94" (UID: "49bff277-b5e6-4b61-b964-2a615ff1cf94"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:50:40 crc kubenswrapper[4869]: I1001 15:50:40.891300 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49bff277-b5e6-4b61-b964-2a615ff1cf94-inventory" (OuterVolumeSpecName: "inventory") pod "49bff277-b5e6-4b61-b964-2a615ff1cf94" (UID: "49bff277-b5e6-4b61-b964-2a615ff1cf94"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:50:40 crc kubenswrapper[4869]: I1001 15:50:40.957592 4869 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/49bff277-b5e6-4b61-b964-2a615ff1cf94-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 15:50:40 crc kubenswrapper[4869]: I1001 15:50:40.957640 4869 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/49bff277-b5e6-4b61-b964-2a615ff1cf94-inventory\") on node \"crc\" DevicePath \"\"" Oct 01 15:50:40 crc kubenswrapper[4869]: I1001 15:50:40.957657 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6vcpm\" (UniqueName: \"kubernetes.io/projected/49bff277-b5e6-4b61-b964-2a615ff1cf94-kube-api-access-6vcpm\") on node \"crc\" DevicePath \"\"" Oct 01 15:50:40 crc kubenswrapper[4869]: I1001 15:50:40.957669 4869 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/49bff277-b5e6-4b61-b964-2a615ff1cf94-ceph\") on node \"crc\" DevicePath \"\"" Oct 01 15:50:40 crc kubenswrapper[4869]: I1001 15:50:40.957683 4869 reconciler_common.go:293] "Volume detached for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/49bff277-b5e6-4b61-b964-2a615ff1cf94-libvirt-secret-0\") on node \"crc\" DevicePath \"\"" Oct 01 15:50:40 crc kubenswrapper[4869]: I1001 15:50:40.957694 4869 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/49bff277-b5e6-4b61-b964-2a615ff1cf94-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 01 15:50:41 crc kubenswrapper[4869]: I1001 15:50:41.410648 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4m5m5" event={"ID":"49bff277-b5e6-4b61-b964-2a615ff1cf94","Type":"ContainerDied","Data":"88d2c4dc3e01bf386918bfbe292619fa34f5527532362b0c57f39ddd726d8c8a"} Oct 01 15:50:41 crc kubenswrapper[4869]: I1001 15:50:41.411129 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="88d2c4dc3e01bf386918bfbe292619fa34f5527532362b0c57f39ddd726d8c8a" Oct 01 15:50:41 crc kubenswrapper[4869]: I1001 15:50:41.410942 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4m5m5" Oct 01 15:50:41 crc kubenswrapper[4869]: I1001 15:50:41.538500 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kkddc"] Oct 01 15:50:41 crc kubenswrapper[4869]: E1001 15:50:41.539148 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b278a4f4-391f-4207-930f-1d1f833279b2" containerName="extract-content" Oct 01 15:50:41 crc kubenswrapper[4869]: I1001 15:50:41.539179 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="b278a4f4-391f-4207-930f-1d1f833279b2" containerName="extract-content" Oct 01 15:50:41 crc kubenswrapper[4869]: E1001 15:50:41.539213 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="49bff277-b5e6-4b61-b964-2a615ff1cf94" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Oct 01 15:50:41 crc kubenswrapper[4869]: I1001 15:50:41.539248 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="49bff277-b5e6-4b61-b964-2a615ff1cf94" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Oct 01 15:50:41 crc kubenswrapper[4869]: E1001 15:50:41.539300 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b278a4f4-391f-4207-930f-1d1f833279b2" containerName="registry-server" Oct 01 15:50:41 crc kubenswrapper[4869]: I1001 15:50:41.539328 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="b278a4f4-391f-4207-930f-1d1f833279b2" containerName="registry-server" Oct 01 15:50:41 crc kubenswrapper[4869]: E1001 15:50:41.539394 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b278a4f4-391f-4207-930f-1d1f833279b2" containerName="extract-utilities" Oct 01 15:50:41 crc kubenswrapper[4869]: I1001 15:50:41.539407 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="b278a4f4-391f-4207-930f-1d1f833279b2" containerName="extract-utilities" Oct 01 15:50:41 crc kubenswrapper[4869]: I1001 15:50:41.539737 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="b278a4f4-391f-4207-930f-1d1f833279b2" containerName="registry-server" Oct 01 15:50:41 crc kubenswrapper[4869]: I1001 15:50:41.539794 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="49bff277-b5e6-4b61-b964-2a615ff1cf94" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Oct 01 15:50:41 crc kubenswrapper[4869]: I1001 15:50:41.547905 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kkddc" Oct 01 15:50:41 crc kubenswrapper[4869]: I1001 15:50:41.554782 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kkddc"] Oct 01 15:50:41 crc kubenswrapper[4869]: I1001 15:50:41.556712 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Oct 01 15:50:41 crc kubenswrapper[4869]: I1001 15:50:41.556984 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 01 15:50:41 crc kubenswrapper[4869]: I1001 15:50:41.557094 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 01 15:50:41 crc kubenswrapper[4869]: I1001 15:50:41.557206 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"nova-extra-config" Oct 01 15:50:41 crc kubenswrapper[4869]: I1001 15:50:41.557297 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-cjg8g" Oct 01 15:50:41 crc kubenswrapper[4869]: I1001 15:50:41.557378 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-compute-config" Oct 01 15:50:41 crc kubenswrapper[4869]: I1001 15:50:41.557434 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-migration-ssh-key" Oct 01 15:50:41 crc kubenswrapper[4869]: I1001 15:50:41.557598 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 01 15:50:41 crc kubenswrapper[4869]: I1001 15:50:41.557701 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ceph-nova" Oct 01 15:50:41 crc kubenswrapper[4869]: I1001 15:50:41.566901 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/91f3b9d8-e4a2-4c04-978d-e43153d4af93-inventory\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kkddc\" (UID: \"91f3b9d8-e4a2-4c04-978d-e43153d4af93\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kkddc" Oct 01 15:50:41 crc kubenswrapper[4869]: I1001 15:50:41.566941 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/91f3b9d8-e4a2-4c04-978d-e43153d4af93-nova-cell1-compute-config-1\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kkddc\" (UID: \"91f3b9d8-e4a2-4c04-978d-e43153d4af93\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kkddc" Oct 01 15:50:41 crc kubenswrapper[4869]: I1001 15:50:41.566962 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/91f3b9d8-e4a2-4c04-978d-e43153d4af93-nova-cell1-compute-config-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kkddc\" (UID: \"91f3b9d8-e4a2-4c04-978d-e43153d4af93\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kkddc" Oct 01 15:50:41 crc kubenswrapper[4869]: I1001 15:50:41.566998 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/91f3b9d8-e4a2-4c04-978d-e43153d4af93-nova-extra-config-0\") 
pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kkddc\" (UID: \"91f3b9d8-e4a2-4c04-978d-e43153d4af93\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kkddc" Oct 01 15:50:41 crc kubenswrapper[4869]: I1001 15:50:41.567019 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/91f3b9d8-e4a2-4c04-978d-e43153d4af93-nova-migration-ssh-key-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kkddc\" (UID: \"91f3b9d8-e4a2-4c04-978d-e43153d4af93\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kkddc" Oct 01 15:50:41 crc kubenswrapper[4869]: I1001 15:50:41.567049 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/91f3b9d8-e4a2-4c04-978d-e43153d4af93-ceph\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kkddc\" (UID: \"91f3b9d8-e4a2-4c04-978d-e43153d4af93\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kkddc" Oct 01 15:50:41 crc kubenswrapper[4869]: I1001 15:50:41.567065 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tvcph\" (UniqueName: \"kubernetes.io/projected/91f3b9d8-e4a2-4c04-978d-e43153d4af93-kube-api-access-tvcph\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kkddc\" (UID: \"91f3b9d8-e4a2-4c04-978d-e43153d4af93\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kkddc" Oct 01 15:50:41 crc kubenswrapper[4869]: I1001 15:50:41.567088 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/91f3b9d8-e4a2-4c04-978d-e43153d4af93-ssh-key\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kkddc\" (UID: \"91f3b9d8-e4a2-4c04-978d-e43153d4af93\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kkddc" Oct 01 15:50:41 crc kubenswrapper[4869]: I1001 15:50:41.567112 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/91f3b9d8-e4a2-4c04-978d-e43153d4af93-nova-migration-ssh-key-1\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kkddc\" (UID: \"91f3b9d8-e4a2-4c04-978d-e43153d4af93\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kkddc" Oct 01 15:50:41 crc kubenswrapper[4869]: I1001 15:50:41.567147 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph-nova-0\" (UniqueName: \"kubernetes.io/configmap/91f3b9d8-e4a2-4c04-978d-e43153d4af93-ceph-nova-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kkddc\" (UID: \"91f3b9d8-e4a2-4c04-978d-e43153d4af93\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kkddc" Oct 01 15:50:41 crc kubenswrapper[4869]: I1001 15:50:41.567195 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-custom-ceph-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91f3b9d8-e4a2-4c04-978d-e43153d4af93-nova-custom-ceph-combined-ca-bundle\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kkddc\" (UID: \"91f3b9d8-e4a2-4c04-978d-e43153d4af93\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kkddc" Oct 01 15:50:41 crc kubenswrapper[4869]: 
I1001 15:50:41.670222 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/91f3b9d8-e4a2-4c04-978d-e43153d4af93-nova-migration-ssh-key-1\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kkddc\" (UID: \"91f3b9d8-e4a2-4c04-978d-e43153d4af93\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kkddc" Oct 01 15:50:41 crc kubenswrapper[4869]: I1001 15:50:41.670724 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph-nova-0\" (UniqueName: \"kubernetes.io/configmap/91f3b9d8-e4a2-4c04-978d-e43153d4af93-ceph-nova-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kkddc\" (UID: \"91f3b9d8-e4a2-4c04-978d-e43153d4af93\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kkddc" Oct 01 15:50:41 crc kubenswrapper[4869]: I1001 15:50:41.670965 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-custom-ceph-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91f3b9d8-e4a2-4c04-978d-e43153d4af93-nova-custom-ceph-combined-ca-bundle\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kkddc\" (UID: \"91f3b9d8-e4a2-4c04-978d-e43153d4af93\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kkddc" Oct 01 15:50:41 crc kubenswrapper[4869]: I1001 15:50:41.671052 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/91f3b9d8-e4a2-4c04-978d-e43153d4af93-inventory\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kkddc\" (UID: \"91f3b9d8-e4a2-4c04-978d-e43153d4af93\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kkddc" Oct 01 15:50:41 crc kubenswrapper[4869]: I1001 15:50:41.671103 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/91f3b9d8-e4a2-4c04-978d-e43153d4af93-nova-cell1-compute-config-1\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kkddc\" (UID: \"91f3b9d8-e4a2-4c04-978d-e43153d4af93\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kkddc" Oct 01 15:50:41 crc kubenswrapper[4869]: I1001 15:50:41.671137 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/91f3b9d8-e4a2-4c04-978d-e43153d4af93-nova-cell1-compute-config-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kkddc\" (UID: \"91f3b9d8-e4a2-4c04-978d-e43153d4af93\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kkddc" Oct 01 15:50:41 crc kubenswrapper[4869]: I1001 15:50:41.671249 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/91f3b9d8-e4a2-4c04-978d-e43153d4af93-nova-extra-config-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kkddc\" (UID: \"91f3b9d8-e4a2-4c04-978d-e43153d4af93\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kkddc" Oct 01 15:50:41 crc kubenswrapper[4869]: I1001 15:50:41.671315 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/91f3b9d8-e4a2-4c04-978d-e43153d4af93-nova-migration-ssh-key-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kkddc\" (UID: 
\"91f3b9d8-e4a2-4c04-978d-e43153d4af93\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kkddc" Oct 01 15:50:41 crc kubenswrapper[4869]: I1001 15:50:41.671591 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/91f3b9d8-e4a2-4c04-978d-e43153d4af93-ceph\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kkddc\" (UID: \"91f3b9d8-e4a2-4c04-978d-e43153d4af93\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kkddc" Oct 01 15:50:41 crc kubenswrapper[4869]: I1001 15:50:41.672006 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tvcph\" (UniqueName: \"kubernetes.io/projected/91f3b9d8-e4a2-4c04-978d-e43153d4af93-kube-api-access-tvcph\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kkddc\" (UID: \"91f3b9d8-e4a2-4c04-978d-e43153d4af93\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kkddc" Oct 01 15:50:41 crc kubenswrapper[4869]: I1001 15:50:41.672124 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/91f3b9d8-e4a2-4c04-978d-e43153d4af93-ssh-key\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kkddc\" (UID: \"91f3b9d8-e4a2-4c04-978d-e43153d4af93\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kkddc" Oct 01 15:50:41 crc kubenswrapper[4869]: I1001 15:50:41.673103 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph-nova-0\" (UniqueName: \"kubernetes.io/configmap/91f3b9d8-e4a2-4c04-978d-e43153d4af93-ceph-nova-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kkddc\" (UID: \"91f3b9d8-e4a2-4c04-978d-e43153d4af93\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kkddc" Oct 01 15:50:41 crc kubenswrapper[4869]: I1001 15:50:41.673591 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/91f3b9d8-e4a2-4c04-978d-e43153d4af93-nova-extra-config-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kkddc\" (UID: \"91f3b9d8-e4a2-4c04-978d-e43153d4af93\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kkddc" Oct 01 15:50:41 crc kubenswrapper[4869]: I1001 15:50:41.677280 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/91f3b9d8-e4a2-4c04-978d-e43153d4af93-nova-cell1-compute-config-1\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kkddc\" (UID: \"91f3b9d8-e4a2-4c04-978d-e43153d4af93\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kkddc" Oct 01 15:50:41 crc kubenswrapper[4869]: I1001 15:50:41.677384 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/91f3b9d8-e4a2-4c04-978d-e43153d4af93-inventory\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kkddc\" (UID: \"91f3b9d8-e4a2-4c04-978d-e43153d4af93\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kkddc" Oct 01 15:50:41 crc kubenswrapper[4869]: I1001 15:50:41.677837 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/91f3b9d8-e4a2-4c04-978d-e43153d4af93-nova-cell1-compute-config-0\") pod 
\"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kkddc\" (UID: \"91f3b9d8-e4a2-4c04-978d-e43153d4af93\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kkddc" Oct 01 15:50:41 crc kubenswrapper[4869]: I1001 15:50:41.677845 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/91f3b9d8-e4a2-4c04-978d-e43153d4af93-nova-migration-ssh-key-1\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kkddc\" (UID: \"91f3b9d8-e4a2-4c04-978d-e43153d4af93\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kkddc" Oct 01 15:50:41 crc kubenswrapper[4869]: I1001 15:50:41.678092 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/91f3b9d8-e4a2-4c04-978d-e43153d4af93-ssh-key\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kkddc\" (UID: \"91f3b9d8-e4a2-4c04-978d-e43153d4af93\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kkddc" Oct 01 15:50:41 crc kubenswrapper[4869]: I1001 15:50:41.679159 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-custom-ceph-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91f3b9d8-e4a2-4c04-978d-e43153d4af93-nova-custom-ceph-combined-ca-bundle\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kkddc\" (UID: \"91f3b9d8-e4a2-4c04-978d-e43153d4af93\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kkddc" Oct 01 15:50:41 crc kubenswrapper[4869]: I1001 15:50:41.685844 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/91f3b9d8-e4a2-4c04-978d-e43153d4af93-ceph\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kkddc\" (UID: \"91f3b9d8-e4a2-4c04-978d-e43153d4af93\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kkddc" Oct 01 15:50:41 crc kubenswrapper[4869]: I1001 15:50:41.687819 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/91f3b9d8-e4a2-4c04-978d-e43153d4af93-nova-migration-ssh-key-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kkddc\" (UID: \"91f3b9d8-e4a2-4c04-978d-e43153d4af93\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kkddc" Oct 01 15:50:41 crc kubenswrapper[4869]: I1001 15:50:41.694159 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tvcph\" (UniqueName: \"kubernetes.io/projected/91f3b9d8-e4a2-4c04-978d-e43153d4af93-kube-api-access-tvcph\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kkddc\" (UID: \"91f3b9d8-e4a2-4c04-978d-e43153d4af93\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kkddc" Oct 01 15:50:41 crc kubenswrapper[4869]: I1001 15:50:41.878659 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kkddc" Oct 01 15:50:42 crc kubenswrapper[4869]: I1001 15:50:42.496493 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kkddc"] Oct 01 15:50:43 crc kubenswrapper[4869]: I1001 15:50:43.354210 4869 patch_prober.go:28] interesting pod/machine-config-daemon-c86m8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 15:50:43 crc kubenswrapper[4869]: I1001 15:50:43.354852 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 15:50:43 crc kubenswrapper[4869]: I1001 15:50:43.450905 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kkddc" event={"ID":"91f3b9d8-e4a2-4c04-978d-e43153d4af93","Type":"ContainerStarted","Data":"9d6774d5a4d257138de833e01cc7eb28244197df9cf4ba04a7c3061b2c46c31a"} Oct 01 15:50:44 crc kubenswrapper[4869]: I1001 15:50:44.463232 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kkddc" event={"ID":"91f3b9d8-e4a2-4c04-978d-e43153d4af93","Type":"ContainerStarted","Data":"4e55ad18b48b4515a5c1e9e3c2422ad70d250260157fd1f1b1292b714c04993b"} Oct 01 15:50:44 crc kubenswrapper[4869]: I1001 15:50:44.488304 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kkddc" podStartSLOduration=2.724794628 podStartE2EDuration="3.488282878s" podCreationTimestamp="2025-10-01 15:50:41 +0000 UTC" firstStartedPulling="2025-10-01 15:50:42.505945895 +0000 UTC m=+2751.652789011" lastFinishedPulling="2025-10-01 15:50:43.269434145 +0000 UTC m=+2752.416277261" observedRunningTime="2025-10-01 15:50:44.479976228 +0000 UTC m=+2753.626819364" watchObservedRunningTime="2025-10-01 15:50:44.488282878 +0000 UTC m=+2753.635126014" Oct 01 15:51:13 crc kubenswrapper[4869]: I1001 15:51:13.354131 4869 patch_prober.go:28] interesting pod/machine-config-daemon-c86m8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 15:51:13 crc kubenswrapper[4869]: I1001 15:51:13.354684 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 15:51:43 crc kubenswrapper[4869]: I1001 15:51:43.354581 4869 patch_prober.go:28] interesting pod/machine-config-daemon-c86m8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 15:51:43 crc kubenswrapper[4869]: I1001 15:51:43.355174 
4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 15:51:43 crc kubenswrapper[4869]: I1001 15:51:43.355229 4869 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" Oct 01 15:51:43 crc kubenswrapper[4869]: I1001 15:51:43.355973 4869 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"ea87da209cc47118cc41bdb3107264d5dd2e07bc832e620553f0ac1be9456693"} pod="openshift-machine-config-operator/machine-config-daemon-c86m8" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 01 15:51:43 crc kubenswrapper[4869]: I1001 15:51:43.356025 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" containerID="cri-o://ea87da209cc47118cc41bdb3107264d5dd2e07bc832e620553f0ac1be9456693" gracePeriod=600 Oct 01 15:51:44 crc kubenswrapper[4869]: I1001 15:51:44.022661 4869 generic.go:334] "Generic (PLEG): container finished" podID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerID="ea87da209cc47118cc41bdb3107264d5dd2e07bc832e620553f0ac1be9456693" exitCode=0 Oct 01 15:51:44 crc kubenswrapper[4869]: I1001 15:51:44.022720 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" event={"ID":"a4b64f7f-0b03-4f47-965b-9fde048b735c","Type":"ContainerDied","Data":"ea87da209cc47118cc41bdb3107264d5dd2e07bc832e620553f0ac1be9456693"} Oct 01 15:51:44 crc kubenswrapper[4869]: I1001 15:51:44.022972 4869 scope.go:117] "RemoveContainer" containerID="25093fbe92e6ae652a3af4c4b9e03e2282557f8fc51b27a9e7955934416a4f59" Oct 01 15:51:44 crc kubenswrapper[4869]: E1001 15:51:44.042978 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 15:51:45 crc kubenswrapper[4869]: I1001 15:51:45.034431 4869 scope.go:117] "RemoveContainer" containerID="ea87da209cc47118cc41bdb3107264d5dd2e07bc832e620553f0ac1be9456693" Oct 01 15:51:45 crc kubenswrapper[4869]: E1001 15:51:45.035860 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 15:51:57 crc kubenswrapper[4869]: I1001 15:51:57.580935 4869 scope.go:117] "RemoveContainer" containerID="ea87da209cc47118cc41bdb3107264d5dd2e07bc832e620553f0ac1be9456693" Oct 01 15:51:57 crc kubenswrapper[4869]: E1001 15:51:57.581866 4869 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 15:52:09 crc kubenswrapper[4869]: I1001 15:52:09.582713 4869 scope.go:117] "RemoveContainer" containerID="ea87da209cc47118cc41bdb3107264d5dd2e07bc832e620553f0ac1be9456693" Oct 01 15:52:09 crc kubenswrapper[4869]: E1001 15:52:09.583870 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 15:52:23 crc kubenswrapper[4869]: I1001 15:52:23.585329 4869 scope.go:117] "RemoveContainer" containerID="ea87da209cc47118cc41bdb3107264d5dd2e07bc832e620553f0ac1be9456693" Oct 01 15:52:23 crc kubenswrapper[4869]: E1001 15:52:23.586056 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 15:52:36 crc kubenswrapper[4869]: I1001 15:52:36.581412 4869 scope.go:117] "RemoveContainer" containerID="ea87da209cc47118cc41bdb3107264d5dd2e07bc832e620553f0ac1be9456693" Oct 01 15:52:36 crc kubenswrapper[4869]: E1001 15:52:36.582168 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 15:52:48 crc kubenswrapper[4869]: I1001 15:52:48.581673 4869 scope.go:117] "RemoveContainer" containerID="ea87da209cc47118cc41bdb3107264d5dd2e07bc832e620553f0ac1be9456693" Oct 01 15:52:48 crc kubenswrapper[4869]: E1001 15:52:48.582713 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 15:53:03 crc kubenswrapper[4869]: I1001 15:53:03.581436 4869 scope.go:117] "RemoveContainer" containerID="ea87da209cc47118cc41bdb3107264d5dd2e07bc832e620553f0ac1be9456693" Oct 01 15:53:03 crc kubenswrapper[4869]: E1001 15:53:03.582329 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 15:53:16 crc kubenswrapper[4869]: I1001 15:53:16.581355 4869 scope.go:117] "RemoveContainer" containerID="ea87da209cc47118cc41bdb3107264d5dd2e07bc832e620553f0ac1be9456693" Oct 01 15:53:16 crc kubenswrapper[4869]: E1001 15:53:16.582344 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 15:53:27 crc kubenswrapper[4869]: I1001 15:53:27.581358 4869 scope.go:117] "RemoveContainer" containerID="ea87da209cc47118cc41bdb3107264d5dd2e07bc832e620553f0ac1be9456693" Oct 01 15:53:27 crc kubenswrapper[4869]: E1001 15:53:27.582120 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 15:53:31 crc kubenswrapper[4869]: I1001 15:53:31.114985 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-j5fls"] Oct 01 15:53:31 crc kubenswrapper[4869]: I1001 15:53:31.122384 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-j5fls" Oct 01 15:53:31 crc kubenswrapper[4869]: I1001 15:53:31.139701 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-j5fls"] Oct 01 15:53:31 crc kubenswrapper[4869]: I1001 15:53:31.249437 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c65985c6-dee8-4e55-8fb2-40b23d45b669-utilities\") pod \"redhat-operators-j5fls\" (UID: \"c65985c6-dee8-4e55-8fb2-40b23d45b669\") " pod="openshift-marketplace/redhat-operators-j5fls" Oct 01 15:53:31 crc kubenswrapper[4869]: I1001 15:53:31.249515 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lkrnr\" (UniqueName: \"kubernetes.io/projected/c65985c6-dee8-4e55-8fb2-40b23d45b669-kube-api-access-lkrnr\") pod \"redhat-operators-j5fls\" (UID: \"c65985c6-dee8-4e55-8fb2-40b23d45b669\") " pod="openshift-marketplace/redhat-operators-j5fls" Oct 01 15:53:31 crc kubenswrapper[4869]: I1001 15:53:31.249747 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c65985c6-dee8-4e55-8fb2-40b23d45b669-catalog-content\") pod \"redhat-operators-j5fls\" (UID: \"c65985c6-dee8-4e55-8fb2-40b23d45b669\") " pod="openshift-marketplace/redhat-operators-j5fls" Oct 01 15:53:31 crc kubenswrapper[4869]: I1001 15:53:31.351914 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c65985c6-dee8-4e55-8fb2-40b23d45b669-catalog-content\") pod \"redhat-operators-j5fls\" (UID: \"c65985c6-dee8-4e55-8fb2-40b23d45b669\") " pod="openshift-marketplace/redhat-operators-j5fls" Oct 01 15:53:31 crc kubenswrapper[4869]: I1001 15:53:31.352124 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c65985c6-dee8-4e55-8fb2-40b23d45b669-utilities\") pod \"redhat-operators-j5fls\" (UID: \"c65985c6-dee8-4e55-8fb2-40b23d45b669\") " pod="openshift-marketplace/redhat-operators-j5fls" Oct 01 15:53:31 crc kubenswrapper[4869]: I1001 15:53:31.352207 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lkrnr\" (UniqueName: \"kubernetes.io/projected/c65985c6-dee8-4e55-8fb2-40b23d45b669-kube-api-access-lkrnr\") pod \"redhat-operators-j5fls\" (UID: \"c65985c6-dee8-4e55-8fb2-40b23d45b669\") " pod="openshift-marketplace/redhat-operators-j5fls" Oct 01 15:53:31 crc kubenswrapper[4869]: I1001 15:53:31.352988 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c65985c6-dee8-4e55-8fb2-40b23d45b669-catalog-content\") pod \"redhat-operators-j5fls\" (UID: \"c65985c6-dee8-4e55-8fb2-40b23d45b669\") " pod="openshift-marketplace/redhat-operators-j5fls" Oct 01 15:53:31 crc kubenswrapper[4869]: I1001 15:53:31.353127 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c65985c6-dee8-4e55-8fb2-40b23d45b669-utilities\") pod \"redhat-operators-j5fls\" (UID: \"c65985c6-dee8-4e55-8fb2-40b23d45b669\") " pod="openshift-marketplace/redhat-operators-j5fls" Oct 01 15:53:31 crc kubenswrapper[4869]: I1001 15:53:31.378878 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-lkrnr\" (UniqueName: \"kubernetes.io/projected/c65985c6-dee8-4e55-8fb2-40b23d45b669-kube-api-access-lkrnr\") pod \"redhat-operators-j5fls\" (UID: \"c65985c6-dee8-4e55-8fb2-40b23d45b669\") " pod="openshift-marketplace/redhat-operators-j5fls" Oct 01 15:53:31 crc kubenswrapper[4869]: I1001 15:53:31.451109 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-j5fls" Oct 01 15:53:31 crc kubenswrapper[4869]: I1001 15:53:31.913458 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-j5fls"] Oct 01 15:53:32 crc kubenswrapper[4869]: I1001 15:53:32.085882 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-j5fls" event={"ID":"c65985c6-dee8-4e55-8fb2-40b23d45b669","Type":"ContainerStarted","Data":"d11c6e961f365c18869f033952485bd60aba9e33c4032fe5ab8f1c3c926c2968"} Oct 01 15:53:33 crc kubenswrapper[4869]: I1001 15:53:33.112994 4869 generic.go:334] "Generic (PLEG): container finished" podID="c65985c6-dee8-4e55-8fb2-40b23d45b669" containerID="38484f3270641789a82c4d539859b537b5d0888e4c7da1ef160ce93ab7a2ec11" exitCode=0 Oct 01 15:53:33 crc kubenswrapper[4869]: I1001 15:53:33.113492 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-j5fls" event={"ID":"c65985c6-dee8-4e55-8fb2-40b23d45b669","Type":"ContainerDied","Data":"38484f3270641789a82c4d539859b537b5d0888e4c7da1ef160ce93ab7a2ec11"} Oct 01 15:53:33 crc kubenswrapper[4869]: I1001 15:53:33.117093 4869 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 01 15:53:35 crc kubenswrapper[4869]: E1001 15:53:35.849060 4869 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc65985c6_dee8_4e55_8fb2_40b23d45b669.slice/crio-dd17baa5439e205babaa48830984dee2c4cadf6d3ebe98814249912f105a9deb.scope\": RecentStats: unable to find data in memory cache]" Oct 01 15:53:36 crc kubenswrapper[4869]: I1001 15:53:36.156988 4869 generic.go:334] "Generic (PLEG): container finished" podID="c65985c6-dee8-4e55-8fb2-40b23d45b669" containerID="dd17baa5439e205babaa48830984dee2c4cadf6d3ebe98814249912f105a9deb" exitCode=0 Oct 01 15:53:36 crc kubenswrapper[4869]: I1001 15:53:36.157047 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-j5fls" event={"ID":"c65985c6-dee8-4e55-8fb2-40b23d45b669","Type":"ContainerDied","Data":"dd17baa5439e205babaa48830984dee2c4cadf6d3ebe98814249912f105a9deb"} Oct 01 15:53:37 crc kubenswrapper[4869]: I1001 15:53:37.167855 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-j5fls" event={"ID":"c65985c6-dee8-4e55-8fb2-40b23d45b669","Type":"ContainerStarted","Data":"f9ed6ea1f9ef8a8e4d896de6ba76f97fa8dcc0016258e043bb2f28d7142211c5"} Oct 01 15:53:37 crc kubenswrapper[4869]: I1001 15:53:37.191643 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-j5fls" podStartSLOduration=2.685161037 podStartE2EDuration="6.191624049s" podCreationTimestamp="2025-10-01 15:53:31 +0000 UTC" firstStartedPulling="2025-10-01 15:53:33.116831599 +0000 UTC m=+2922.263674725" lastFinishedPulling="2025-10-01 15:53:36.623294581 +0000 UTC m=+2925.770137737" observedRunningTime="2025-10-01 15:53:37.190728817 +0000 UTC m=+2926.337571953" 
watchObservedRunningTime="2025-10-01 15:53:37.191624049 +0000 UTC m=+2926.338467165" Oct 01 15:53:41 crc kubenswrapper[4869]: I1001 15:53:41.452189 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-j5fls" Oct 01 15:53:41 crc kubenswrapper[4869]: I1001 15:53:41.453061 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-j5fls" Oct 01 15:53:41 crc kubenswrapper[4869]: I1001 15:53:41.508835 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-j5fls" Oct 01 15:53:41 crc kubenswrapper[4869]: I1001 15:53:41.593177 4869 scope.go:117] "RemoveContainer" containerID="ea87da209cc47118cc41bdb3107264d5dd2e07bc832e620553f0ac1be9456693" Oct 01 15:53:41 crc kubenswrapper[4869]: E1001 15:53:41.593434 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 15:53:42 crc kubenswrapper[4869]: I1001 15:53:42.279341 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-j5fls" Oct 01 15:53:42 crc kubenswrapper[4869]: I1001 15:53:42.326622 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-j5fls"] Oct 01 15:53:44 crc kubenswrapper[4869]: I1001 15:53:44.227845 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-j5fls" podUID="c65985c6-dee8-4e55-8fb2-40b23d45b669" containerName="registry-server" containerID="cri-o://f9ed6ea1f9ef8a8e4d896de6ba76f97fa8dcc0016258e043bb2f28d7142211c5" gracePeriod=2 Oct 01 15:53:44 crc kubenswrapper[4869]: I1001 15:53:44.713509 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-j5fls" Oct 01 15:53:44 crc kubenswrapper[4869]: I1001 15:53:44.855157 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c65985c6-dee8-4e55-8fb2-40b23d45b669-catalog-content\") pod \"c65985c6-dee8-4e55-8fb2-40b23d45b669\" (UID: \"c65985c6-dee8-4e55-8fb2-40b23d45b669\") " Oct 01 15:53:44 crc kubenswrapper[4869]: I1001 15:53:44.855401 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c65985c6-dee8-4e55-8fb2-40b23d45b669-utilities\") pod \"c65985c6-dee8-4e55-8fb2-40b23d45b669\" (UID: \"c65985c6-dee8-4e55-8fb2-40b23d45b669\") " Oct 01 15:53:44 crc kubenswrapper[4869]: I1001 15:53:44.855625 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lkrnr\" (UniqueName: \"kubernetes.io/projected/c65985c6-dee8-4e55-8fb2-40b23d45b669-kube-api-access-lkrnr\") pod \"c65985c6-dee8-4e55-8fb2-40b23d45b669\" (UID: \"c65985c6-dee8-4e55-8fb2-40b23d45b669\") " Oct 01 15:53:44 crc kubenswrapper[4869]: I1001 15:53:44.856931 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c65985c6-dee8-4e55-8fb2-40b23d45b669-utilities" (OuterVolumeSpecName: "utilities") pod "c65985c6-dee8-4e55-8fb2-40b23d45b669" (UID: "c65985c6-dee8-4e55-8fb2-40b23d45b669"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 15:53:44 crc kubenswrapper[4869]: I1001 15:53:44.863679 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c65985c6-dee8-4e55-8fb2-40b23d45b669-kube-api-access-lkrnr" (OuterVolumeSpecName: "kube-api-access-lkrnr") pod "c65985c6-dee8-4e55-8fb2-40b23d45b669" (UID: "c65985c6-dee8-4e55-8fb2-40b23d45b669"). InnerVolumeSpecName "kube-api-access-lkrnr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:53:44 crc kubenswrapper[4869]: I1001 15:53:44.958446 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lkrnr\" (UniqueName: \"kubernetes.io/projected/c65985c6-dee8-4e55-8fb2-40b23d45b669-kube-api-access-lkrnr\") on node \"crc\" DevicePath \"\"" Oct 01 15:53:44 crc kubenswrapper[4869]: I1001 15:53:44.958500 4869 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c65985c6-dee8-4e55-8fb2-40b23d45b669-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 15:53:45 crc kubenswrapper[4869]: I1001 15:53:45.011619 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c65985c6-dee8-4e55-8fb2-40b23d45b669-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c65985c6-dee8-4e55-8fb2-40b23d45b669" (UID: "c65985c6-dee8-4e55-8fb2-40b23d45b669"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 15:53:45 crc kubenswrapper[4869]: I1001 15:53:45.060395 4869 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c65985c6-dee8-4e55-8fb2-40b23d45b669-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 15:53:45 crc kubenswrapper[4869]: I1001 15:53:45.239481 4869 generic.go:334] "Generic (PLEG): container finished" podID="c65985c6-dee8-4e55-8fb2-40b23d45b669" containerID="f9ed6ea1f9ef8a8e4d896de6ba76f97fa8dcc0016258e043bb2f28d7142211c5" exitCode=0 Oct 01 15:53:45 crc kubenswrapper[4869]: I1001 15:53:45.239548 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-j5fls" event={"ID":"c65985c6-dee8-4e55-8fb2-40b23d45b669","Type":"ContainerDied","Data":"f9ed6ea1f9ef8a8e4d896de6ba76f97fa8dcc0016258e043bb2f28d7142211c5"} Oct 01 15:53:45 crc kubenswrapper[4869]: I1001 15:53:45.239591 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-j5fls" event={"ID":"c65985c6-dee8-4e55-8fb2-40b23d45b669","Type":"ContainerDied","Data":"d11c6e961f365c18869f033952485bd60aba9e33c4032fe5ab8f1c3c926c2968"} Oct 01 15:53:45 crc kubenswrapper[4869]: I1001 15:53:45.239557 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-j5fls" Oct 01 15:53:45 crc kubenswrapper[4869]: I1001 15:53:45.239653 4869 scope.go:117] "RemoveContainer" containerID="f9ed6ea1f9ef8a8e4d896de6ba76f97fa8dcc0016258e043bb2f28d7142211c5" Oct 01 15:53:45 crc kubenswrapper[4869]: I1001 15:53:45.273011 4869 scope.go:117] "RemoveContainer" containerID="dd17baa5439e205babaa48830984dee2c4cadf6d3ebe98814249912f105a9deb" Oct 01 15:53:45 crc kubenswrapper[4869]: I1001 15:53:45.287995 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-j5fls"] Oct 01 15:53:45 crc kubenswrapper[4869]: I1001 15:53:45.297777 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-j5fls"] Oct 01 15:53:45 crc kubenswrapper[4869]: I1001 15:53:45.300981 4869 scope.go:117] "RemoveContainer" containerID="38484f3270641789a82c4d539859b537b5d0888e4c7da1ef160ce93ab7a2ec11" Oct 01 15:53:45 crc kubenswrapper[4869]: I1001 15:53:45.349692 4869 scope.go:117] "RemoveContainer" containerID="f9ed6ea1f9ef8a8e4d896de6ba76f97fa8dcc0016258e043bb2f28d7142211c5" Oct 01 15:53:45 crc kubenswrapper[4869]: E1001 15:53:45.350173 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f9ed6ea1f9ef8a8e4d896de6ba76f97fa8dcc0016258e043bb2f28d7142211c5\": container with ID starting with f9ed6ea1f9ef8a8e4d896de6ba76f97fa8dcc0016258e043bb2f28d7142211c5 not found: ID does not exist" containerID="f9ed6ea1f9ef8a8e4d896de6ba76f97fa8dcc0016258e043bb2f28d7142211c5" Oct 01 15:53:45 crc kubenswrapper[4869]: I1001 15:53:45.350382 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f9ed6ea1f9ef8a8e4d896de6ba76f97fa8dcc0016258e043bb2f28d7142211c5"} err="failed to get container status \"f9ed6ea1f9ef8a8e4d896de6ba76f97fa8dcc0016258e043bb2f28d7142211c5\": rpc error: code = NotFound desc = could not find container \"f9ed6ea1f9ef8a8e4d896de6ba76f97fa8dcc0016258e043bb2f28d7142211c5\": container with ID starting with f9ed6ea1f9ef8a8e4d896de6ba76f97fa8dcc0016258e043bb2f28d7142211c5 not found: ID does not exist" Oct 01 15:53:45 crc 
kubenswrapper[4869]: I1001 15:53:45.350510 4869 scope.go:117] "RemoveContainer" containerID="dd17baa5439e205babaa48830984dee2c4cadf6d3ebe98814249912f105a9deb" Oct 01 15:53:45 crc kubenswrapper[4869]: E1001 15:53:45.351035 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dd17baa5439e205babaa48830984dee2c4cadf6d3ebe98814249912f105a9deb\": container with ID starting with dd17baa5439e205babaa48830984dee2c4cadf6d3ebe98814249912f105a9deb not found: ID does not exist" containerID="dd17baa5439e205babaa48830984dee2c4cadf6d3ebe98814249912f105a9deb" Oct 01 15:53:45 crc kubenswrapper[4869]: I1001 15:53:45.351146 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dd17baa5439e205babaa48830984dee2c4cadf6d3ebe98814249912f105a9deb"} err="failed to get container status \"dd17baa5439e205babaa48830984dee2c4cadf6d3ebe98814249912f105a9deb\": rpc error: code = NotFound desc = could not find container \"dd17baa5439e205babaa48830984dee2c4cadf6d3ebe98814249912f105a9deb\": container with ID starting with dd17baa5439e205babaa48830984dee2c4cadf6d3ebe98814249912f105a9deb not found: ID does not exist" Oct 01 15:53:45 crc kubenswrapper[4869]: I1001 15:53:45.351427 4869 scope.go:117] "RemoveContainer" containerID="38484f3270641789a82c4d539859b537b5d0888e4c7da1ef160ce93ab7a2ec11" Oct 01 15:53:45 crc kubenswrapper[4869]: E1001 15:53:45.351780 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"38484f3270641789a82c4d539859b537b5d0888e4c7da1ef160ce93ab7a2ec11\": container with ID starting with 38484f3270641789a82c4d539859b537b5d0888e4c7da1ef160ce93ab7a2ec11 not found: ID does not exist" containerID="38484f3270641789a82c4d539859b537b5d0888e4c7da1ef160ce93ab7a2ec11" Oct 01 15:53:45 crc kubenswrapper[4869]: I1001 15:53:45.351812 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"38484f3270641789a82c4d539859b537b5d0888e4c7da1ef160ce93ab7a2ec11"} err="failed to get container status \"38484f3270641789a82c4d539859b537b5d0888e4c7da1ef160ce93ab7a2ec11\": rpc error: code = NotFound desc = could not find container \"38484f3270641789a82c4d539859b537b5d0888e4c7da1ef160ce93ab7a2ec11\": container with ID starting with 38484f3270641789a82c4d539859b537b5d0888e4c7da1ef160ce93ab7a2ec11 not found: ID does not exist" Oct 01 15:53:45 crc kubenswrapper[4869]: I1001 15:53:45.599199 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c65985c6-dee8-4e55-8fb2-40b23d45b669" path="/var/lib/kubelet/pods/c65985c6-dee8-4e55-8fb2-40b23d45b669/volumes" Oct 01 15:53:56 crc kubenswrapper[4869]: I1001 15:53:56.580696 4869 scope.go:117] "RemoveContainer" containerID="ea87da209cc47118cc41bdb3107264d5dd2e07bc832e620553f0ac1be9456693" Oct 01 15:53:56 crc kubenswrapper[4869]: E1001 15:53:56.581507 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 15:54:07 crc kubenswrapper[4869]: I1001 15:54:07.581705 4869 scope.go:117] "RemoveContainer" containerID="ea87da209cc47118cc41bdb3107264d5dd2e07bc832e620553f0ac1be9456693" 
Oct 01 15:54:07 crc kubenswrapper[4869]: E1001 15:54:07.582489 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 15:54:18 crc kubenswrapper[4869]: I1001 15:54:18.581553 4869 scope.go:117] "RemoveContainer" containerID="ea87da209cc47118cc41bdb3107264d5dd2e07bc832e620553f0ac1be9456693" Oct 01 15:54:18 crc kubenswrapper[4869]: E1001 15:54:18.583651 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 15:54:23 crc kubenswrapper[4869]: I1001 15:54:23.618922 4869 generic.go:334] "Generic (PLEG): container finished" podID="91f3b9d8-e4a2-4c04-978d-e43153d4af93" containerID="4e55ad18b48b4515a5c1e9e3c2422ad70d250260157fd1f1b1292b714c04993b" exitCode=0 Oct 01 15:54:23 crc kubenswrapper[4869]: I1001 15:54:23.618996 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kkddc" event={"ID":"91f3b9d8-e4a2-4c04-978d-e43153d4af93","Type":"ContainerDied","Data":"4e55ad18b48b4515a5c1e9e3c2422ad70d250260157fd1f1b1292b714c04993b"} Oct 01 15:54:25 crc kubenswrapper[4869]: I1001 15:54:25.019212 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kkddc" Oct 01 15:54:25 crc kubenswrapper[4869]: I1001 15:54:25.112332 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/91f3b9d8-e4a2-4c04-978d-e43153d4af93-nova-migration-ssh-key-1\") pod \"91f3b9d8-e4a2-4c04-978d-e43153d4af93\" (UID: \"91f3b9d8-e4a2-4c04-978d-e43153d4af93\") " Oct 01 15:54:25 crc kubenswrapper[4869]: I1001 15:54:25.112415 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/91f3b9d8-e4a2-4c04-978d-e43153d4af93-nova-extra-config-0\") pod \"91f3b9d8-e4a2-4c04-978d-e43153d4af93\" (UID: \"91f3b9d8-e4a2-4c04-978d-e43153d4af93\") " Oct 01 15:54:25 crc kubenswrapper[4869]: I1001 15:54:25.112467 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-custom-ceph-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91f3b9d8-e4a2-4c04-978d-e43153d4af93-nova-custom-ceph-combined-ca-bundle\") pod \"91f3b9d8-e4a2-4c04-978d-e43153d4af93\" (UID: \"91f3b9d8-e4a2-4c04-978d-e43153d4af93\") " Oct 01 15:54:25 crc kubenswrapper[4869]: I1001 15:54:25.112485 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tvcph\" (UniqueName: \"kubernetes.io/projected/91f3b9d8-e4a2-4c04-978d-e43153d4af93-kube-api-access-tvcph\") pod \"91f3b9d8-e4a2-4c04-978d-e43153d4af93\" (UID: \"91f3b9d8-e4a2-4c04-978d-e43153d4af93\") " Oct 01 15:54:25 crc kubenswrapper[4869]: I1001 15:54:25.112509 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/91f3b9d8-e4a2-4c04-978d-e43153d4af93-ssh-key\") pod \"91f3b9d8-e4a2-4c04-978d-e43153d4af93\" (UID: \"91f3b9d8-e4a2-4c04-978d-e43153d4af93\") " Oct 01 15:54:25 crc kubenswrapper[4869]: I1001 15:54:25.112532 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/91f3b9d8-e4a2-4c04-978d-e43153d4af93-nova-cell1-compute-config-0\") pod \"91f3b9d8-e4a2-4c04-978d-e43153d4af93\" (UID: \"91f3b9d8-e4a2-4c04-978d-e43153d4af93\") " Oct 01 15:54:25 crc kubenswrapper[4869]: I1001 15:54:25.112563 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/91f3b9d8-e4a2-4c04-978d-e43153d4af93-nova-cell1-compute-config-1\") pod \"91f3b9d8-e4a2-4c04-978d-e43153d4af93\" (UID: \"91f3b9d8-e4a2-4c04-978d-e43153d4af93\") " Oct 01 15:54:25 crc kubenswrapper[4869]: I1001 15:54:25.112615 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph-nova-0\" (UniqueName: \"kubernetes.io/configmap/91f3b9d8-e4a2-4c04-978d-e43153d4af93-ceph-nova-0\") pod \"91f3b9d8-e4a2-4c04-978d-e43153d4af93\" (UID: \"91f3b9d8-e4a2-4c04-978d-e43153d4af93\") " Oct 01 15:54:25 crc kubenswrapper[4869]: I1001 15:54:25.112635 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/91f3b9d8-e4a2-4c04-978d-e43153d4af93-ceph\") pod \"91f3b9d8-e4a2-4c04-978d-e43153d4af93\" (UID: \"91f3b9d8-e4a2-4c04-978d-e43153d4af93\") " Oct 01 15:54:25 crc kubenswrapper[4869]: I1001 15:54:25.112694 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: 
\"kubernetes.io/secret/91f3b9d8-e4a2-4c04-978d-e43153d4af93-nova-migration-ssh-key-0\") pod \"91f3b9d8-e4a2-4c04-978d-e43153d4af93\" (UID: \"91f3b9d8-e4a2-4c04-978d-e43153d4af93\") " Oct 01 15:54:25 crc kubenswrapper[4869]: I1001 15:54:25.112729 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/91f3b9d8-e4a2-4c04-978d-e43153d4af93-inventory\") pod \"91f3b9d8-e4a2-4c04-978d-e43153d4af93\" (UID: \"91f3b9d8-e4a2-4c04-978d-e43153d4af93\") " Oct 01 15:54:25 crc kubenswrapper[4869]: I1001 15:54:25.117824 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/91f3b9d8-e4a2-4c04-978d-e43153d4af93-ceph" (OuterVolumeSpecName: "ceph") pod "91f3b9d8-e4a2-4c04-978d-e43153d4af93" (UID: "91f3b9d8-e4a2-4c04-978d-e43153d4af93"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:54:25 crc kubenswrapper[4869]: I1001 15:54:25.132468 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/91f3b9d8-e4a2-4c04-978d-e43153d4af93-kube-api-access-tvcph" (OuterVolumeSpecName: "kube-api-access-tvcph") pod "91f3b9d8-e4a2-4c04-978d-e43153d4af93" (UID: "91f3b9d8-e4a2-4c04-978d-e43153d4af93"). InnerVolumeSpecName "kube-api-access-tvcph". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:54:25 crc kubenswrapper[4869]: I1001 15:54:25.133187 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/91f3b9d8-e4a2-4c04-978d-e43153d4af93-nova-custom-ceph-combined-ca-bundle" (OuterVolumeSpecName: "nova-custom-ceph-combined-ca-bundle") pod "91f3b9d8-e4a2-4c04-978d-e43153d4af93" (UID: "91f3b9d8-e4a2-4c04-978d-e43153d4af93"). InnerVolumeSpecName "nova-custom-ceph-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:54:25 crc kubenswrapper[4869]: I1001 15:54:25.142727 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/91f3b9d8-e4a2-4c04-978d-e43153d4af93-ceph-nova-0" (OuterVolumeSpecName: "ceph-nova-0") pod "91f3b9d8-e4a2-4c04-978d-e43153d4af93" (UID: "91f3b9d8-e4a2-4c04-978d-e43153d4af93"). InnerVolumeSpecName "ceph-nova-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:54:25 crc kubenswrapper[4869]: I1001 15:54:25.142890 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/91f3b9d8-e4a2-4c04-978d-e43153d4af93-nova-cell1-compute-config-1" (OuterVolumeSpecName: "nova-cell1-compute-config-1") pod "91f3b9d8-e4a2-4c04-978d-e43153d4af93" (UID: "91f3b9d8-e4a2-4c04-978d-e43153d4af93"). InnerVolumeSpecName "nova-cell1-compute-config-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:54:25 crc kubenswrapper[4869]: I1001 15:54:25.145138 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/91f3b9d8-e4a2-4c04-978d-e43153d4af93-nova-cell1-compute-config-0" (OuterVolumeSpecName: "nova-cell1-compute-config-0") pod "91f3b9d8-e4a2-4c04-978d-e43153d4af93" (UID: "91f3b9d8-e4a2-4c04-978d-e43153d4af93"). InnerVolumeSpecName "nova-cell1-compute-config-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:54:25 crc kubenswrapper[4869]: I1001 15:54:25.145509 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/91f3b9d8-e4a2-4c04-978d-e43153d4af93-nova-migration-ssh-key-1" (OuterVolumeSpecName: "nova-migration-ssh-key-1") pod "91f3b9d8-e4a2-4c04-978d-e43153d4af93" (UID: "91f3b9d8-e4a2-4c04-978d-e43153d4af93"). InnerVolumeSpecName "nova-migration-ssh-key-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:54:25 crc kubenswrapper[4869]: I1001 15:54:25.146590 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/91f3b9d8-e4a2-4c04-978d-e43153d4af93-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "91f3b9d8-e4a2-4c04-978d-e43153d4af93" (UID: "91f3b9d8-e4a2-4c04-978d-e43153d4af93"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:54:25 crc kubenswrapper[4869]: I1001 15:54:25.155526 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/91f3b9d8-e4a2-4c04-978d-e43153d4af93-inventory" (OuterVolumeSpecName: "inventory") pod "91f3b9d8-e4a2-4c04-978d-e43153d4af93" (UID: "91f3b9d8-e4a2-4c04-978d-e43153d4af93"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:54:25 crc kubenswrapper[4869]: I1001 15:54:25.155597 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/91f3b9d8-e4a2-4c04-978d-e43153d4af93-nova-extra-config-0" (OuterVolumeSpecName: "nova-extra-config-0") pod "91f3b9d8-e4a2-4c04-978d-e43153d4af93" (UID: "91f3b9d8-e4a2-4c04-978d-e43153d4af93"). InnerVolumeSpecName "nova-extra-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:54:25 crc kubenswrapper[4869]: I1001 15:54:25.178988 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/91f3b9d8-e4a2-4c04-978d-e43153d4af93-nova-migration-ssh-key-0" (OuterVolumeSpecName: "nova-migration-ssh-key-0") pod "91f3b9d8-e4a2-4c04-978d-e43153d4af93" (UID: "91f3b9d8-e4a2-4c04-978d-e43153d4af93"). InnerVolumeSpecName "nova-migration-ssh-key-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:54:25 crc kubenswrapper[4869]: I1001 15:54:25.215644 4869 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/91f3b9d8-e4a2-4c04-978d-e43153d4af93-inventory\") on node \"crc\" DevicePath \"\"" Oct 01 15:54:25 crc kubenswrapper[4869]: I1001 15:54:25.215674 4869 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/91f3b9d8-e4a2-4c04-978d-e43153d4af93-nova-migration-ssh-key-1\") on node \"crc\" DevicePath \"\"" Oct 01 15:54:25 crc kubenswrapper[4869]: I1001 15:54:25.215685 4869 reconciler_common.go:293] "Volume detached for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/91f3b9d8-e4a2-4c04-978d-e43153d4af93-nova-extra-config-0\") on node \"crc\" DevicePath \"\"" Oct 01 15:54:25 crc kubenswrapper[4869]: I1001 15:54:25.215696 4869 reconciler_common.go:293] "Volume detached for volume \"nova-custom-ceph-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91f3b9d8-e4a2-4c04-978d-e43153d4af93-nova-custom-ceph-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 15:54:25 crc kubenswrapper[4869]: I1001 15:54:25.215705 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tvcph\" (UniqueName: \"kubernetes.io/projected/91f3b9d8-e4a2-4c04-978d-e43153d4af93-kube-api-access-tvcph\") on node \"crc\" DevicePath \"\"" Oct 01 15:54:25 crc kubenswrapper[4869]: I1001 15:54:25.215713 4869 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/91f3b9d8-e4a2-4c04-978d-e43153d4af93-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 01 15:54:25 crc kubenswrapper[4869]: I1001 15:54:25.215723 4869 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/91f3b9d8-e4a2-4c04-978d-e43153d4af93-nova-cell1-compute-config-0\") on node \"crc\" DevicePath \"\"" Oct 01 15:54:25 crc kubenswrapper[4869]: I1001 15:54:25.215732 4869 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/91f3b9d8-e4a2-4c04-978d-e43153d4af93-nova-cell1-compute-config-1\") on node \"crc\" DevicePath \"\"" Oct 01 15:54:25 crc kubenswrapper[4869]: I1001 15:54:25.215741 4869 reconciler_common.go:293] "Volume detached for volume \"ceph-nova-0\" (UniqueName: \"kubernetes.io/configmap/91f3b9d8-e4a2-4c04-978d-e43153d4af93-ceph-nova-0\") on node \"crc\" DevicePath \"\"" Oct 01 15:54:25 crc kubenswrapper[4869]: I1001 15:54:25.215750 4869 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/91f3b9d8-e4a2-4c04-978d-e43153d4af93-ceph\") on node \"crc\" DevicePath \"\"" Oct 01 15:54:25 crc kubenswrapper[4869]: I1001 15:54:25.215759 4869 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/91f3b9d8-e4a2-4c04-978d-e43153d4af93-nova-migration-ssh-key-0\") on node \"crc\" DevicePath \"\"" Oct 01 15:54:25 crc kubenswrapper[4869]: I1001 15:54:25.649635 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kkddc" event={"ID":"91f3b9d8-e4a2-4c04-978d-e43153d4af93","Type":"ContainerDied","Data":"9d6774d5a4d257138de833e01cc7eb28244197df9cf4ba04a7c3061b2c46c31a"} Oct 01 15:54:25 crc kubenswrapper[4869]: I1001 15:54:25.649723 4869 pod_container_deletor.go:80] "Container not found in pod's 
containers" containerID="9d6774d5a4d257138de833e01cc7eb28244197df9cf4ba04a7c3061b2c46c31a" Oct 01 15:54:25 crc kubenswrapper[4869]: I1001 15:54:25.649730 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kkddc" Oct 01 15:54:29 crc kubenswrapper[4869]: I1001 15:54:29.581708 4869 scope.go:117] "RemoveContainer" containerID="ea87da209cc47118cc41bdb3107264d5dd2e07bc832e620553f0ac1be9456693" Oct 01 15:54:29 crc kubenswrapper[4869]: E1001 15:54:29.582674 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.621947 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-backup-0"] Oct 01 15:54:39 crc kubenswrapper[4869]: E1001 15:54:39.622883 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c65985c6-dee8-4e55-8fb2-40b23d45b669" containerName="extract-content" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.622897 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="c65985c6-dee8-4e55-8fb2-40b23d45b669" containerName="extract-content" Oct 01 15:54:39 crc kubenswrapper[4869]: E1001 15:54:39.622912 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c65985c6-dee8-4e55-8fb2-40b23d45b669" containerName="registry-server" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.622918 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="c65985c6-dee8-4e55-8fb2-40b23d45b669" containerName="registry-server" Oct 01 15:54:39 crc kubenswrapper[4869]: E1001 15:54:39.622931 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c65985c6-dee8-4e55-8fb2-40b23d45b669" containerName="extract-utilities" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.622937 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="c65985c6-dee8-4e55-8fb2-40b23d45b669" containerName="extract-utilities" Oct 01 15:54:39 crc kubenswrapper[4869]: E1001 15:54:39.622950 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="91f3b9d8-e4a2-4c04-978d-e43153d4af93" containerName="nova-custom-ceph-edpm-deployment-openstack-edpm-ipam" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.622959 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="91f3b9d8-e4a2-4c04-978d-e43153d4af93" containerName="nova-custom-ceph-edpm-deployment-openstack-edpm-ipam" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.623135 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="c65985c6-dee8-4e55-8fb2-40b23d45b669" containerName="registry-server" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.623146 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="91f3b9d8-e4a2-4c04-978d-e43153d4af93" containerName="nova-custom-ceph-edpm-deployment-openstack-edpm-ipam" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.624135 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-backup-0" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.627490 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.628289 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-volume-volume1-0"] Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.631762 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-volume-volume1-0" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.636683 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-volume-volume1-config-data" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.637686 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-backup-0"] Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.642077 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-backup-config-data" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.646435 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-volume-volume1-0"] Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.713271 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/456f4cbe-3d22-4705-abbe-09cadc1c0ce2-var-locks-brick\") pod \"cinder-volume-volume1-0\" (UID: \"456f4cbe-3d22-4705-abbe-09cadc1c0ce2\") " pod="openstack/cinder-volume-volume1-0" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.713310 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/456f4cbe-3d22-4705-abbe-09cadc1c0ce2-var-locks-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"456f4cbe-3d22-4705-abbe-09cadc1c0ce2\") " pod="openstack/cinder-volume-volume1-0" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.713336 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d2w9p\" (UniqueName: \"kubernetes.io/projected/cd72163f-65c7-4984-a967-de8f42861de4-kube-api-access-d2w9p\") pod \"cinder-backup-0\" (UID: \"cd72163f-65c7-4984-a967-de8f42861de4\") " pod="openstack/cinder-backup-0" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.713362 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/cd72163f-65c7-4984-a967-de8f42861de4-ceph\") pod \"cinder-backup-0\" (UID: \"cd72163f-65c7-4984-a967-de8f42861de4\") " pod="openstack/cinder-backup-0" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.713386 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/456f4cbe-3d22-4705-abbe-09cadc1c0ce2-ceph\") pod \"cinder-volume-volume1-0\" (UID: \"456f4cbe-3d22-4705-abbe-09cadc1c0ce2\") " pod="openstack/cinder-volume-volume1-0" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.713405 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/cd72163f-65c7-4984-a967-de8f42861de4-etc-nvme\") pod \"cinder-backup-0\" (UID: \"cd72163f-65c7-4984-a967-de8f42861de4\") " pod="openstack/cinder-backup-0" Oct 01 15:54:39 crc 
kubenswrapper[4869]: I1001 15:54:39.713420 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/cd72163f-65c7-4984-a967-de8f42861de4-lib-modules\") pod \"cinder-backup-0\" (UID: \"cd72163f-65c7-4984-a967-de8f42861de4\") " pod="openstack/cinder-backup-0" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.713553 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/456f4cbe-3d22-4705-abbe-09cadc1c0ce2-sys\") pod \"cinder-volume-volume1-0\" (UID: \"456f4cbe-3d22-4705-abbe-09cadc1c0ce2\") " pod="openstack/cinder-volume-volume1-0" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.713601 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/cd72163f-65c7-4984-a967-de8f42861de4-dev\") pod \"cinder-backup-0\" (UID: \"cd72163f-65c7-4984-a967-de8f42861de4\") " pod="openstack/cinder-backup-0" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.713663 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/456f4cbe-3d22-4705-abbe-09cadc1c0ce2-dev\") pod \"cinder-volume-volume1-0\" (UID: \"456f4cbe-3d22-4705-abbe-09cadc1c0ce2\") " pod="openstack/cinder-volume-volume1-0" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.713745 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/cd72163f-65c7-4984-a967-de8f42861de4-config-data-custom\") pod \"cinder-backup-0\" (UID: \"cd72163f-65c7-4984-a967-de8f42861de4\") " pod="openstack/cinder-backup-0" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.713844 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/456f4cbe-3d22-4705-abbe-09cadc1c0ce2-config-data-custom\") pod \"cinder-volume-volume1-0\" (UID: \"456f4cbe-3d22-4705-abbe-09cadc1c0ce2\") " pod="openstack/cinder-volume-volume1-0" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.713888 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/456f4cbe-3d22-4705-abbe-09cadc1c0ce2-etc-machine-id\") pod \"cinder-volume-volume1-0\" (UID: \"456f4cbe-3d22-4705-abbe-09cadc1c0ce2\") " pod="openstack/cinder-volume-volume1-0" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.713909 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/456f4cbe-3d22-4705-abbe-09cadc1c0ce2-combined-ca-bundle\") pod \"cinder-volume-volume1-0\" (UID: \"456f4cbe-3d22-4705-abbe-09cadc1c0ce2\") " pod="openstack/cinder-volume-volume1-0" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.713957 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/cd72163f-65c7-4984-a967-de8f42861de4-var-lib-cinder\") pod \"cinder-backup-0\" (UID: \"cd72163f-65c7-4984-a967-de8f42861de4\") " pod="openstack/cinder-backup-0" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.713991 4869 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cd72163f-65c7-4984-a967-de8f42861de4-config-data\") pod \"cinder-backup-0\" (UID: \"cd72163f-65c7-4984-a967-de8f42861de4\") " pod="openstack/cinder-backup-0" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.714020 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/cd72163f-65c7-4984-a967-de8f42861de4-etc-machine-id\") pod \"cinder-backup-0\" (UID: \"cd72163f-65c7-4984-a967-de8f42861de4\") " pod="openstack/cinder-backup-0" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.714049 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/cd72163f-65c7-4984-a967-de8f42861de4-etc-iscsi\") pod \"cinder-backup-0\" (UID: \"cd72163f-65c7-4984-a967-de8f42861de4\") " pod="openstack/cinder-backup-0" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.714063 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cd72163f-65c7-4984-a967-de8f42861de4-scripts\") pod \"cinder-backup-0\" (UID: \"cd72163f-65c7-4984-a967-de8f42861de4\") " pod="openstack/cinder-backup-0" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.715002 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/456f4cbe-3d22-4705-abbe-09cadc1c0ce2-config-data\") pod \"cinder-volume-volume1-0\" (UID: \"456f4cbe-3d22-4705-abbe-09cadc1c0ce2\") " pod="openstack/cinder-volume-volume1-0" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.715042 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/456f4cbe-3d22-4705-abbe-09cadc1c0ce2-lib-modules\") pod \"cinder-volume-volume1-0\" (UID: \"456f4cbe-3d22-4705-abbe-09cadc1c0ce2\") " pod="openstack/cinder-volume-volume1-0" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.715089 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/cd72163f-65c7-4984-a967-de8f42861de4-var-locks-cinder\") pod \"cinder-backup-0\" (UID: \"cd72163f-65c7-4984-a967-de8f42861de4\") " pod="openstack/cinder-backup-0" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.715153 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/cd72163f-65c7-4984-a967-de8f42861de4-run\") pod \"cinder-backup-0\" (UID: \"cd72163f-65c7-4984-a967-de8f42861de4\") " pod="openstack/cinder-backup-0" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.715181 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/cd72163f-65c7-4984-a967-de8f42861de4-var-locks-brick\") pod \"cinder-backup-0\" (UID: \"cd72163f-65c7-4984-a967-de8f42861de4\") " pod="openstack/cinder-backup-0" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.715218 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-cinder\" (UniqueName: 
\"kubernetes.io/host-path/456f4cbe-3d22-4705-abbe-09cadc1c0ce2-var-lib-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"456f4cbe-3d22-4705-abbe-09cadc1c0ce2\") " pod="openstack/cinder-volume-volume1-0" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.715239 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/456f4cbe-3d22-4705-abbe-09cadc1c0ce2-etc-nvme\") pod \"cinder-volume-volume1-0\" (UID: \"456f4cbe-3d22-4705-abbe-09cadc1c0ce2\") " pod="openstack/cinder-volume-volume1-0" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.715279 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd72163f-65c7-4984-a967-de8f42861de4-combined-ca-bundle\") pod \"cinder-backup-0\" (UID: \"cd72163f-65c7-4984-a967-de8f42861de4\") " pod="openstack/cinder-backup-0" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.715303 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/456f4cbe-3d22-4705-abbe-09cadc1c0ce2-etc-iscsi\") pod \"cinder-volume-volume1-0\" (UID: \"456f4cbe-3d22-4705-abbe-09cadc1c0ce2\") " pod="openstack/cinder-volume-volume1-0" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.715339 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/456f4cbe-3d22-4705-abbe-09cadc1c0ce2-scripts\") pod \"cinder-volume-volume1-0\" (UID: \"456f4cbe-3d22-4705-abbe-09cadc1c0ce2\") " pod="openstack/cinder-volume-volume1-0" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.715405 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/456f4cbe-3d22-4705-abbe-09cadc1c0ce2-run\") pod \"cinder-volume-volume1-0\" (UID: \"456f4cbe-3d22-4705-abbe-09cadc1c0ce2\") " pod="openstack/cinder-volume-volume1-0" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.715425 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/cd72163f-65c7-4984-a967-de8f42861de4-sys\") pod \"cinder-backup-0\" (UID: \"cd72163f-65c7-4984-a967-de8f42861de4\") " pod="openstack/cinder-backup-0" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.715499 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lszlj\" (UniqueName: \"kubernetes.io/projected/456f4cbe-3d22-4705-abbe-09cadc1c0ce2-kube-api-access-lszlj\") pod \"cinder-volume-volume1-0\" (UID: \"456f4cbe-3d22-4705-abbe-09cadc1c0ce2\") " pod="openstack/cinder-volume-volume1-0" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.817457 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lszlj\" (UniqueName: \"kubernetes.io/projected/456f4cbe-3d22-4705-abbe-09cadc1c0ce2-kube-api-access-lszlj\") pod \"cinder-volume-volume1-0\" (UID: \"456f4cbe-3d22-4705-abbe-09cadc1c0ce2\") " pod="openstack/cinder-volume-volume1-0" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.817526 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/456f4cbe-3d22-4705-abbe-09cadc1c0ce2-var-locks-cinder\") pod 
\"cinder-volume-volume1-0\" (UID: \"456f4cbe-3d22-4705-abbe-09cadc1c0ce2\") " pod="openstack/cinder-volume-volume1-0" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.817551 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/456f4cbe-3d22-4705-abbe-09cadc1c0ce2-var-locks-brick\") pod \"cinder-volume-volume1-0\" (UID: \"456f4cbe-3d22-4705-abbe-09cadc1c0ce2\") " pod="openstack/cinder-volume-volume1-0" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.817577 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d2w9p\" (UniqueName: \"kubernetes.io/projected/cd72163f-65c7-4984-a967-de8f42861de4-kube-api-access-d2w9p\") pod \"cinder-backup-0\" (UID: \"cd72163f-65c7-4984-a967-de8f42861de4\") " pod="openstack/cinder-backup-0" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.817612 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/cd72163f-65c7-4984-a967-de8f42861de4-ceph\") pod \"cinder-backup-0\" (UID: \"cd72163f-65c7-4984-a967-de8f42861de4\") " pod="openstack/cinder-backup-0" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.817644 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/456f4cbe-3d22-4705-abbe-09cadc1c0ce2-ceph\") pod \"cinder-volume-volume1-0\" (UID: \"456f4cbe-3d22-4705-abbe-09cadc1c0ce2\") " pod="openstack/cinder-volume-volume1-0" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.817670 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/cd72163f-65c7-4984-a967-de8f42861de4-etc-nvme\") pod \"cinder-backup-0\" (UID: \"cd72163f-65c7-4984-a967-de8f42861de4\") " pod="openstack/cinder-backup-0" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.817688 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/cd72163f-65c7-4984-a967-de8f42861de4-lib-modules\") pod \"cinder-backup-0\" (UID: \"cd72163f-65c7-4984-a967-de8f42861de4\") " pod="openstack/cinder-backup-0" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.817714 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/456f4cbe-3d22-4705-abbe-09cadc1c0ce2-sys\") pod \"cinder-volume-volume1-0\" (UID: \"456f4cbe-3d22-4705-abbe-09cadc1c0ce2\") " pod="openstack/cinder-volume-volume1-0" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.817731 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/cd72163f-65c7-4984-a967-de8f42861de4-dev\") pod \"cinder-backup-0\" (UID: \"cd72163f-65c7-4984-a967-de8f42861de4\") " pod="openstack/cinder-backup-0" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.817758 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/456f4cbe-3d22-4705-abbe-09cadc1c0ce2-dev\") pod \"cinder-volume-volume1-0\" (UID: \"456f4cbe-3d22-4705-abbe-09cadc1c0ce2\") " pod="openstack/cinder-volume-volume1-0" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.817793 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: 
\"kubernetes.io/secret/cd72163f-65c7-4984-a967-de8f42861de4-config-data-custom\") pod \"cinder-backup-0\" (UID: \"cd72163f-65c7-4984-a967-de8f42861de4\") " pod="openstack/cinder-backup-0" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.817830 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/456f4cbe-3d22-4705-abbe-09cadc1c0ce2-config-data-custom\") pod \"cinder-volume-volume1-0\" (UID: \"456f4cbe-3d22-4705-abbe-09cadc1c0ce2\") " pod="openstack/cinder-volume-volume1-0" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.817862 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/456f4cbe-3d22-4705-abbe-09cadc1c0ce2-etc-machine-id\") pod \"cinder-volume-volume1-0\" (UID: \"456f4cbe-3d22-4705-abbe-09cadc1c0ce2\") " pod="openstack/cinder-volume-volume1-0" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.817863 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/cd72163f-65c7-4984-a967-de8f42861de4-lib-modules\") pod \"cinder-backup-0\" (UID: \"cd72163f-65c7-4984-a967-de8f42861de4\") " pod="openstack/cinder-backup-0" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.817881 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/456f4cbe-3d22-4705-abbe-09cadc1c0ce2-combined-ca-bundle\") pod \"cinder-volume-volume1-0\" (UID: \"456f4cbe-3d22-4705-abbe-09cadc1c0ce2\") " pod="openstack/cinder-volume-volume1-0" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.817879 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/456f4cbe-3d22-4705-abbe-09cadc1c0ce2-var-locks-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"456f4cbe-3d22-4705-abbe-09cadc1c0ce2\") " pod="openstack/cinder-volume-volume1-0" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.817934 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/cd72163f-65c7-4984-a967-de8f42861de4-var-lib-cinder\") pod \"cinder-backup-0\" (UID: \"cd72163f-65c7-4984-a967-de8f42861de4\") " pod="openstack/cinder-backup-0" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.817945 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/456f4cbe-3d22-4705-abbe-09cadc1c0ce2-sys\") pod \"cinder-volume-volume1-0\" (UID: \"456f4cbe-3d22-4705-abbe-09cadc1c0ce2\") " pod="openstack/cinder-volume-volume1-0" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.817962 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cd72163f-65c7-4984-a967-de8f42861de4-config-data\") pod \"cinder-backup-0\" (UID: \"cd72163f-65c7-4984-a967-de8f42861de4\") " pod="openstack/cinder-backup-0" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.817964 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/456f4cbe-3d22-4705-abbe-09cadc1c0ce2-var-locks-brick\") pod \"cinder-volume-volume1-0\" (UID: \"456f4cbe-3d22-4705-abbe-09cadc1c0ce2\") " pod="openstack/cinder-volume-volume1-0" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 
15:54:39.817982 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/cd72163f-65c7-4984-a967-de8f42861de4-dev\") pod \"cinder-backup-0\" (UID: \"cd72163f-65c7-4984-a967-de8f42861de4\") " pod="openstack/cinder-backup-0" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.817988 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/cd72163f-65c7-4984-a967-de8f42861de4-etc-machine-id\") pod \"cinder-backup-0\" (UID: \"cd72163f-65c7-4984-a967-de8f42861de4\") " pod="openstack/cinder-backup-0" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.818019 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/456f4cbe-3d22-4705-abbe-09cadc1c0ce2-dev\") pod \"cinder-volume-volume1-0\" (UID: \"456f4cbe-3d22-4705-abbe-09cadc1c0ce2\") " pod="openstack/cinder-volume-volume1-0" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.818024 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/456f4cbe-3d22-4705-abbe-09cadc1c0ce2-etc-machine-id\") pod \"cinder-volume-volume1-0\" (UID: \"456f4cbe-3d22-4705-abbe-09cadc1c0ce2\") " pod="openstack/cinder-volume-volume1-0" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.818782 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/cd72163f-65c7-4984-a967-de8f42861de4-etc-nvme\") pod \"cinder-backup-0\" (UID: \"cd72163f-65c7-4984-a967-de8f42861de4\") " pod="openstack/cinder-backup-0" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.818868 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/cd72163f-65c7-4984-a967-de8f42861de4-var-lib-cinder\") pod \"cinder-backup-0\" (UID: \"cd72163f-65c7-4984-a967-de8f42861de4\") " pod="openstack/cinder-backup-0" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.818893 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/cd72163f-65c7-4984-a967-de8f42861de4-etc-machine-id\") pod \"cinder-backup-0\" (UID: \"cd72163f-65c7-4984-a967-de8f42861de4\") " pod="openstack/cinder-backup-0" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.818977 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/cd72163f-65c7-4984-a967-de8f42861de4-etc-iscsi\") pod \"cinder-backup-0\" (UID: \"cd72163f-65c7-4984-a967-de8f42861de4\") " pod="openstack/cinder-backup-0" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.818994 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cd72163f-65c7-4984-a967-de8f42861de4-scripts\") pod \"cinder-backup-0\" (UID: \"cd72163f-65c7-4984-a967-de8f42861de4\") " pod="openstack/cinder-backup-0" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.819014 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/456f4cbe-3d22-4705-abbe-09cadc1c0ce2-config-data\") pod \"cinder-volume-volume1-0\" (UID: \"456f4cbe-3d22-4705-abbe-09cadc1c0ce2\") " pod="openstack/cinder-volume-volume1-0" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.819031 
4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/456f4cbe-3d22-4705-abbe-09cadc1c0ce2-lib-modules\") pod \"cinder-volume-volume1-0\" (UID: \"456f4cbe-3d22-4705-abbe-09cadc1c0ce2\") " pod="openstack/cinder-volume-volume1-0" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.819053 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/cd72163f-65c7-4984-a967-de8f42861de4-etc-iscsi\") pod \"cinder-backup-0\" (UID: \"cd72163f-65c7-4984-a967-de8f42861de4\") " pod="openstack/cinder-backup-0" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.819085 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/456f4cbe-3d22-4705-abbe-09cadc1c0ce2-lib-modules\") pod \"cinder-volume-volume1-0\" (UID: \"456f4cbe-3d22-4705-abbe-09cadc1c0ce2\") " pod="openstack/cinder-volume-volume1-0" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.819061 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/cd72163f-65c7-4984-a967-de8f42861de4-var-locks-cinder\") pod \"cinder-backup-0\" (UID: \"cd72163f-65c7-4984-a967-de8f42861de4\") " pod="openstack/cinder-backup-0" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.819140 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/cd72163f-65c7-4984-a967-de8f42861de4-run\") pod \"cinder-backup-0\" (UID: \"cd72163f-65c7-4984-a967-de8f42861de4\") " pod="openstack/cinder-backup-0" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.819166 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/cd72163f-65c7-4984-a967-de8f42861de4-var-locks-brick\") pod \"cinder-backup-0\" (UID: \"cd72163f-65c7-4984-a967-de8f42861de4\") " pod="openstack/cinder-backup-0" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.819191 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/cd72163f-65c7-4984-a967-de8f42861de4-var-locks-cinder\") pod \"cinder-backup-0\" (UID: \"cd72163f-65c7-4984-a967-de8f42861de4\") " pod="openstack/cinder-backup-0" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.819214 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/456f4cbe-3d22-4705-abbe-09cadc1c0ce2-etc-nvme\") pod \"cinder-volume-volume1-0\" (UID: \"456f4cbe-3d22-4705-abbe-09cadc1c0ce2\") " pod="openstack/cinder-volume-volume1-0" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.819221 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run\" (UniqueName: \"kubernetes.io/host-path/cd72163f-65c7-4984-a967-de8f42861de4-run\") pod \"cinder-backup-0\" (UID: \"cd72163f-65c7-4984-a967-de8f42861de4\") " pod="openstack/cinder-backup-0" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.819240 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/456f4cbe-3d22-4705-abbe-09cadc1c0ce2-var-lib-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"456f4cbe-3d22-4705-abbe-09cadc1c0ce2\") " pod="openstack/cinder-volume-volume1-0" Oct 01 15:54:39 crc 
kubenswrapper[4869]: I1001 15:54:39.819252 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/cd72163f-65c7-4984-a967-de8f42861de4-var-locks-brick\") pod \"cinder-backup-0\" (UID: \"cd72163f-65c7-4984-a967-de8f42861de4\") " pod="openstack/cinder-backup-0" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.819286 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd72163f-65c7-4984-a967-de8f42861de4-combined-ca-bundle\") pod \"cinder-backup-0\" (UID: \"cd72163f-65c7-4984-a967-de8f42861de4\") " pod="openstack/cinder-backup-0" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.819295 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/456f4cbe-3d22-4705-abbe-09cadc1c0ce2-etc-nvme\") pod \"cinder-volume-volume1-0\" (UID: \"456f4cbe-3d22-4705-abbe-09cadc1c0ce2\") " pod="openstack/cinder-volume-volume1-0" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.819311 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/456f4cbe-3d22-4705-abbe-09cadc1c0ce2-etc-iscsi\") pod \"cinder-volume-volume1-0\" (UID: \"456f4cbe-3d22-4705-abbe-09cadc1c0ce2\") " pod="openstack/cinder-volume-volume1-0" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.819330 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/456f4cbe-3d22-4705-abbe-09cadc1c0ce2-var-lib-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"456f4cbe-3d22-4705-abbe-09cadc1c0ce2\") " pod="openstack/cinder-volume-volume1-0" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.819342 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/456f4cbe-3d22-4705-abbe-09cadc1c0ce2-scripts\") pod \"cinder-volume-volume1-0\" (UID: \"456f4cbe-3d22-4705-abbe-09cadc1c0ce2\") " pod="openstack/cinder-volume-volume1-0" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.819379 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/456f4cbe-3d22-4705-abbe-09cadc1c0ce2-run\") pod \"cinder-volume-volume1-0\" (UID: \"456f4cbe-3d22-4705-abbe-09cadc1c0ce2\") " pod="openstack/cinder-volume-volume1-0" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.819381 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/456f4cbe-3d22-4705-abbe-09cadc1c0ce2-etc-iscsi\") pod \"cinder-volume-volume1-0\" (UID: \"456f4cbe-3d22-4705-abbe-09cadc1c0ce2\") " pod="openstack/cinder-volume-volume1-0" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.819399 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/cd72163f-65c7-4984-a967-de8f42861de4-sys\") pod \"cinder-backup-0\" (UID: \"cd72163f-65c7-4984-a967-de8f42861de4\") " pod="openstack/cinder-backup-0" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.819505 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/cd72163f-65c7-4984-a967-de8f42861de4-sys\") pod \"cinder-backup-0\" (UID: \"cd72163f-65c7-4984-a967-de8f42861de4\") " 
pod="openstack/cinder-backup-0" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.819539 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run\" (UniqueName: \"kubernetes.io/host-path/456f4cbe-3d22-4705-abbe-09cadc1c0ce2-run\") pod \"cinder-volume-volume1-0\" (UID: \"456f4cbe-3d22-4705-abbe-09cadc1c0ce2\") " pod="openstack/cinder-volume-volume1-0" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.824144 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/cd72163f-65c7-4984-a967-de8f42861de4-ceph\") pod \"cinder-backup-0\" (UID: \"cd72163f-65c7-4984-a967-de8f42861de4\") " pod="openstack/cinder-backup-0" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.824327 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd72163f-65c7-4984-a967-de8f42861de4-combined-ca-bundle\") pod \"cinder-backup-0\" (UID: \"cd72163f-65c7-4984-a967-de8f42861de4\") " pod="openstack/cinder-backup-0" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.824418 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/456f4cbe-3d22-4705-abbe-09cadc1c0ce2-ceph\") pod \"cinder-volume-volume1-0\" (UID: \"456f4cbe-3d22-4705-abbe-09cadc1c0ce2\") " pod="openstack/cinder-volume-volume1-0" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.825093 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/456f4cbe-3d22-4705-abbe-09cadc1c0ce2-config-data-custom\") pod \"cinder-volume-volume1-0\" (UID: \"456f4cbe-3d22-4705-abbe-09cadc1c0ce2\") " pod="openstack/cinder-volume-volume1-0" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.825785 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/456f4cbe-3d22-4705-abbe-09cadc1c0ce2-scripts\") pod \"cinder-volume-volume1-0\" (UID: \"456f4cbe-3d22-4705-abbe-09cadc1c0ce2\") " pod="openstack/cinder-volume-volume1-0" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.825893 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cd72163f-65c7-4984-a967-de8f42861de4-scripts\") pod \"cinder-backup-0\" (UID: \"cd72163f-65c7-4984-a967-de8f42861de4\") " pod="openstack/cinder-backup-0" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.827317 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cd72163f-65c7-4984-a967-de8f42861de4-config-data\") pod \"cinder-backup-0\" (UID: \"cd72163f-65c7-4984-a967-de8f42861de4\") " pod="openstack/cinder-backup-0" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.827628 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/456f4cbe-3d22-4705-abbe-09cadc1c0ce2-config-data\") pod \"cinder-volume-volume1-0\" (UID: \"456f4cbe-3d22-4705-abbe-09cadc1c0ce2\") " pod="openstack/cinder-volume-volume1-0" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.831829 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/456f4cbe-3d22-4705-abbe-09cadc1c0ce2-combined-ca-bundle\") pod \"cinder-volume-volume1-0\" (UID: \"456f4cbe-3d22-4705-abbe-09cadc1c0ce2\") " 
pod="openstack/cinder-volume-volume1-0" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.832694 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/cd72163f-65c7-4984-a967-de8f42861de4-config-data-custom\") pod \"cinder-backup-0\" (UID: \"cd72163f-65c7-4984-a967-de8f42861de4\") " pod="openstack/cinder-backup-0" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.835250 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d2w9p\" (UniqueName: \"kubernetes.io/projected/cd72163f-65c7-4984-a967-de8f42861de4-kube-api-access-d2w9p\") pod \"cinder-backup-0\" (UID: \"cd72163f-65c7-4984-a967-de8f42861de4\") " pod="openstack/cinder-backup-0" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.838851 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lszlj\" (UniqueName: \"kubernetes.io/projected/456f4cbe-3d22-4705-abbe-09cadc1c0ce2-kube-api-access-lszlj\") pod \"cinder-volume-volume1-0\" (UID: \"456f4cbe-3d22-4705-abbe-09cadc1c0ce2\") " pod="openstack/cinder-volume-volume1-0" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.948883 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-backup-0" Oct 01 15:54:39 crc kubenswrapper[4869]: I1001 15:54:39.959094 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-volume-volume1-0" Oct 01 15:54:40 crc kubenswrapper[4869]: I1001 15:54:40.191311 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-db-create-4vqnt"] Oct 01 15:54:40 crc kubenswrapper[4869]: I1001 15:54:40.192682 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-create-4vqnt" Oct 01 15:54:40 crc kubenswrapper[4869]: I1001 15:54:40.202585 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-db-create-4vqnt"] Oct 01 15:54:40 crc kubenswrapper[4869]: I1001 15:54:40.334213 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kxh4v\" (UniqueName: \"kubernetes.io/projected/c44eaba4-c497-45e1-9f35-bb8b579b70fd-kube-api-access-kxh4v\") pod \"manila-db-create-4vqnt\" (UID: \"c44eaba4-c497-45e1-9f35-bb8b579b70fd\") " pod="openstack/manila-db-create-4vqnt" Oct 01 15:54:40 crc kubenswrapper[4869]: I1001 15:54:40.435635 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kxh4v\" (UniqueName: \"kubernetes.io/projected/c44eaba4-c497-45e1-9f35-bb8b579b70fd-kube-api-access-kxh4v\") pod \"manila-db-create-4vqnt\" (UID: \"c44eaba4-c497-45e1-9f35-bb8b579b70fd\") " pod="openstack/manila-db-create-4vqnt" Oct 01 15:54:40 crc kubenswrapper[4869]: I1001 15:54:40.462030 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kxh4v\" (UniqueName: \"kubernetes.io/projected/c44eaba4-c497-45e1-9f35-bb8b579b70fd-kube-api-access-kxh4v\") pod \"manila-db-create-4vqnt\" (UID: \"c44eaba4-c497-45e1-9f35-bb8b579b70fd\") " pod="openstack/manila-db-create-4vqnt" Oct 01 15:54:40 crc kubenswrapper[4869]: I1001 15:54:40.468040 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Oct 01 15:54:40 crc kubenswrapper[4869]: I1001 15:54:40.469677 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 01 15:54:40 crc kubenswrapper[4869]: I1001 15:54:40.474677 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Oct 01 15:54:40 crc kubenswrapper[4869]: I1001 15:54:40.474920 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Oct 01 15:54:40 crc kubenswrapper[4869]: I1001 15:54:40.475158 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Oct 01 15:54:40 crc kubenswrapper[4869]: I1001 15:54:40.475367 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-df8gc" Oct 01 15:54:40 crc kubenswrapper[4869]: I1001 15:54:40.475534 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 01 15:54:40 crc kubenswrapper[4869]: I1001 15:54:40.528005 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-create-4vqnt" Oct 01 15:54:40 crc kubenswrapper[4869]: I1001 15:54:40.543587 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/734729c7-8311-4c3f-ab6c-55592dfcf7c2-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"734729c7-8311-4c3f-ab6c-55592dfcf7c2\") " pod="openstack/glance-default-external-api-0" Oct 01 15:54:40 crc kubenswrapper[4869]: I1001 15:54:40.544026 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/734729c7-8311-4c3f-ab6c-55592dfcf7c2-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"734729c7-8311-4c3f-ab6c-55592dfcf7c2\") " pod="openstack/glance-default-external-api-0" Oct 01 15:54:40 crc kubenswrapper[4869]: I1001 15:54:40.544201 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/734729c7-8311-4c3f-ab6c-55592dfcf7c2-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"734729c7-8311-4c3f-ab6c-55592dfcf7c2\") " pod="openstack/glance-default-external-api-0" Oct 01 15:54:40 crc kubenswrapper[4869]: I1001 15:54:40.544543 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-56lgb\" (UniqueName: \"kubernetes.io/projected/734729c7-8311-4c3f-ab6c-55592dfcf7c2-kube-api-access-56lgb\") pod \"glance-default-external-api-0\" (UID: \"734729c7-8311-4c3f-ab6c-55592dfcf7c2\") " pod="openstack/glance-default-external-api-0" Oct 01 15:54:40 crc kubenswrapper[4869]: I1001 15:54:40.544850 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-external-api-0\" (UID: \"734729c7-8311-4c3f-ab6c-55592dfcf7c2\") " pod="openstack/glance-default-external-api-0" Oct 01 15:54:40 crc kubenswrapper[4869]: I1001 15:54:40.545087 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/734729c7-8311-4c3f-ab6c-55592dfcf7c2-config-data\") pod \"glance-default-external-api-0\" (UID: \"734729c7-8311-4c3f-ab6c-55592dfcf7c2\") " pod="openstack/glance-default-external-api-0" Oct 01 15:54:40 
crc kubenswrapper[4869]: I1001 15:54:40.545505 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/734729c7-8311-4c3f-ab6c-55592dfcf7c2-logs\") pod \"glance-default-external-api-0\" (UID: \"734729c7-8311-4c3f-ab6c-55592dfcf7c2\") " pod="openstack/glance-default-external-api-0" Oct 01 15:54:40 crc kubenswrapper[4869]: I1001 15:54:40.545658 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/734729c7-8311-4c3f-ab6c-55592dfcf7c2-scripts\") pod \"glance-default-external-api-0\" (UID: \"734729c7-8311-4c3f-ab6c-55592dfcf7c2\") " pod="openstack/glance-default-external-api-0" Oct 01 15:54:40 crc kubenswrapper[4869]: I1001 15:54:40.545926 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/734729c7-8311-4c3f-ab6c-55592dfcf7c2-ceph\") pod \"glance-default-external-api-0\" (UID: \"734729c7-8311-4c3f-ab6c-55592dfcf7c2\") " pod="openstack/glance-default-external-api-0" Oct 01 15:54:40 crc kubenswrapper[4869]: I1001 15:54:40.546771 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 01 15:54:40 crc kubenswrapper[4869]: I1001 15:54:40.550601 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 01 15:54:40 crc kubenswrapper[4869]: I1001 15:54:40.555740 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Oct 01 15:54:40 crc kubenswrapper[4869]: I1001 15:54:40.557090 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Oct 01 15:54:40 crc kubenswrapper[4869]: I1001 15:54:40.565302 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 01 15:54:40 crc kubenswrapper[4869]: I1001 15:54:40.585279 4869 scope.go:117] "RemoveContainer" containerID="ea87da209cc47118cc41bdb3107264d5dd2e07bc832e620553f0ac1be9456693" Oct 01 15:54:40 crc kubenswrapper[4869]: E1001 15:54:40.585708 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 15:54:40 crc kubenswrapper[4869]: I1001 15:54:40.623766 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-volume-volume1-0"] Oct 01 15:54:40 crc kubenswrapper[4869]: I1001 15:54:40.648662 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lcd2n\" (UniqueName: \"kubernetes.io/projected/2a8c2f28-3cab-44c4-911a-c32162850921-kube-api-access-lcd2n\") pod \"glance-default-internal-api-0\" (UID: \"2a8c2f28-3cab-44c4-911a-c32162850921\") " pod="openstack/glance-default-internal-api-0" Oct 01 15:54:40 crc kubenswrapper[4869]: I1001 15:54:40.648783 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod 
\"glance-default-internal-api-0\" (UID: \"2a8c2f28-3cab-44c4-911a-c32162850921\") " pod="openstack/glance-default-internal-api-0" Oct 01 15:54:40 crc kubenswrapper[4869]: I1001 15:54:40.648832 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2a8c2f28-3cab-44c4-911a-c32162850921-logs\") pod \"glance-default-internal-api-0\" (UID: \"2a8c2f28-3cab-44c4-911a-c32162850921\") " pod="openstack/glance-default-internal-api-0" Oct 01 15:54:40 crc kubenswrapper[4869]: I1001 15:54:40.648865 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/734729c7-8311-4c3f-ab6c-55592dfcf7c2-ceph\") pod \"glance-default-external-api-0\" (UID: \"734729c7-8311-4c3f-ab6c-55592dfcf7c2\") " pod="openstack/glance-default-external-api-0" Oct 01 15:54:40 crc kubenswrapper[4869]: I1001 15:54:40.648909 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/734729c7-8311-4c3f-ab6c-55592dfcf7c2-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"734729c7-8311-4c3f-ab6c-55592dfcf7c2\") " pod="openstack/glance-default-external-api-0" Oct 01 15:54:40 crc kubenswrapper[4869]: I1001 15:54:40.648935 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2a8c2f28-3cab-44c4-911a-c32162850921-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"2a8c2f28-3cab-44c4-911a-c32162850921\") " pod="openstack/glance-default-internal-api-0" Oct 01 15:54:40 crc kubenswrapper[4869]: I1001 15:54:40.648956 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/734729c7-8311-4c3f-ab6c-55592dfcf7c2-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"734729c7-8311-4c3f-ab6c-55592dfcf7c2\") " pod="openstack/glance-default-external-api-0" Oct 01 15:54:40 crc kubenswrapper[4869]: I1001 15:54:40.648979 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/734729c7-8311-4c3f-ab6c-55592dfcf7c2-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"734729c7-8311-4c3f-ab6c-55592dfcf7c2\") " pod="openstack/glance-default-external-api-0" Oct 01 15:54:40 crc kubenswrapper[4869]: I1001 15:54:40.649018 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/2a8c2f28-3cab-44c4-911a-c32162850921-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"2a8c2f28-3cab-44c4-911a-c32162850921\") " pod="openstack/glance-default-internal-api-0" Oct 01 15:54:40 crc kubenswrapper[4869]: I1001 15:54:40.649036 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-56lgb\" (UniqueName: \"kubernetes.io/projected/734729c7-8311-4c3f-ab6c-55592dfcf7c2-kube-api-access-56lgb\") pod \"glance-default-external-api-0\" (UID: \"734729c7-8311-4c3f-ab6c-55592dfcf7c2\") " pod="openstack/glance-default-external-api-0" Oct 01 15:54:40 crc kubenswrapper[4869]: I1001 15:54:40.649062 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/2a8c2f28-3cab-44c4-911a-c32162850921-ceph\") pod 
\"glance-default-internal-api-0\" (UID: \"2a8c2f28-3cab-44c4-911a-c32162850921\") " pod="openstack/glance-default-internal-api-0" Oct 01 15:54:40 crc kubenswrapper[4869]: I1001 15:54:40.649104 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-external-api-0\" (UID: \"734729c7-8311-4c3f-ab6c-55592dfcf7c2\") " pod="openstack/glance-default-external-api-0" Oct 01 15:54:40 crc kubenswrapper[4869]: I1001 15:54:40.649120 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2a8c2f28-3cab-44c4-911a-c32162850921-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"2a8c2f28-3cab-44c4-911a-c32162850921\") " pod="openstack/glance-default-internal-api-0" Oct 01 15:54:40 crc kubenswrapper[4869]: I1001 15:54:40.649237 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/734729c7-8311-4c3f-ab6c-55592dfcf7c2-config-data\") pod \"glance-default-external-api-0\" (UID: \"734729c7-8311-4c3f-ab6c-55592dfcf7c2\") " pod="openstack/glance-default-external-api-0" Oct 01 15:54:40 crc kubenswrapper[4869]: I1001 15:54:40.649319 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2a8c2f28-3cab-44c4-911a-c32162850921-config-data\") pod \"glance-default-internal-api-0\" (UID: \"2a8c2f28-3cab-44c4-911a-c32162850921\") " pod="openstack/glance-default-internal-api-0" Oct 01 15:54:40 crc kubenswrapper[4869]: I1001 15:54:40.649342 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/734729c7-8311-4c3f-ab6c-55592dfcf7c2-logs\") pod \"glance-default-external-api-0\" (UID: \"734729c7-8311-4c3f-ab6c-55592dfcf7c2\") " pod="openstack/glance-default-external-api-0" Oct 01 15:54:40 crc kubenswrapper[4869]: I1001 15:54:40.649395 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/734729c7-8311-4c3f-ab6c-55592dfcf7c2-scripts\") pod \"glance-default-external-api-0\" (UID: \"734729c7-8311-4c3f-ab6c-55592dfcf7c2\") " pod="openstack/glance-default-external-api-0" Oct 01 15:54:40 crc kubenswrapper[4869]: I1001 15:54:40.649415 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2a8c2f28-3cab-44c4-911a-c32162850921-scripts\") pod \"glance-default-internal-api-0\" (UID: \"2a8c2f28-3cab-44c4-911a-c32162850921\") " pod="openstack/glance-default-internal-api-0" Oct 01 15:54:40 crc kubenswrapper[4869]: I1001 15:54:40.649779 4869 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-external-api-0\" (UID: \"734729c7-8311-4c3f-ab6c-55592dfcf7c2\") device mount path \"/mnt/openstack/pv06\"" pod="openstack/glance-default-external-api-0" Oct 01 15:54:40 crc kubenswrapper[4869]: I1001 15:54:40.650883 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/734729c7-8311-4c3f-ab6c-55592dfcf7c2-httpd-run\") pod \"glance-default-external-api-0\" (UID: 
\"734729c7-8311-4c3f-ab6c-55592dfcf7c2\") " pod="openstack/glance-default-external-api-0" Oct 01 15:54:40 crc kubenswrapper[4869]: I1001 15:54:40.651397 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/734729c7-8311-4c3f-ab6c-55592dfcf7c2-logs\") pod \"glance-default-external-api-0\" (UID: \"734729c7-8311-4c3f-ab6c-55592dfcf7c2\") " pod="openstack/glance-default-external-api-0" Oct 01 15:54:40 crc kubenswrapper[4869]: I1001 15:54:40.653740 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/734729c7-8311-4c3f-ab6c-55592dfcf7c2-ceph\") pod \"glance-default-external-api-0\" (UID: \"734729c7-8311-4c3f-ab6c-55592dfcf7c2\") " pod="openstack/glance-default-external-api-0" Oct 01 15:54:40 crc kubenswrapper[4869]: I1001 15:54:40.655457 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/734729c7-8311-4c3f-ab6c-55592dfcf7c2-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"734729c7-8311-4c3f-ab6c-55592dfcf7c2\") " pod="openstack/glance-default-external-api-0" Oct 01 15:54:40 crc kubenswrapper[4869]: I1001 15:54:40.655744 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/734729c7-8311-4c3f-ab6c-55592dfcf7c2-scripts\") pod \"glance-default-external-api-0\" (UID: \"734729c7-8311-4c3f-ab6c-55592dfcf7c2\") " pod="openstack/glance-default-external-api-0" Oct 01 15:54:40 crc kubenswrapper[4869]: I1001 15:54:40.661063 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/734729c7-8311-4c3f-ab6c-55592dfcf7c2-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"734729c7-8311-4c3f-ab6c-55592dfcf7c2\") " pod="openstack/glance-default-external-api-0" Oct 01 15:54:40 crc kubenswrapper[4869]: I1001 15:54:40.661496 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/734729c7-8311-4c3f-ab6c-55592dfcf7c2-config-data\") pod \"glance-default-external-api-0\" (UID: \"734729c7-8311-4c3f-ab6c-55592dfcf7c2\") " pod="openstack/glance-default-external-api-0" Oct 01 15:54:40 crc kubenswrapper[4869]: I1001 15:54:40.667764 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-56lgb\" (UniqueName: \"kubernetes.io/projected/734729c7-8311-4c3f-ab6c-55592dfcf7c2-kube-api-access-56lgb\") pod \"glance-default-external-api-0\" (UID: \"734729c7-8311-4c3f-ab6c-55592dfcf7c2\") " pod="openstack/glance-default-external-api-0" Oct 01 15:54:40 crc kubenswrapper[4869]: I1001 15:54:40.681526 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-external-api-0\" (UID: \"734729c7-8311-4c3f-ab6c-55592dfcf7c2\") " pod="openstack/glance-default-external-api-0" Oct 01 15:54:40 crc kubenswrapper[4869]: I1001 15:54:40.750685 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lcd2n\" (UniqueName: \"kubernetes.io/projected/2a8c2f28-3cab-44c4-911a-c32162850921-kube-api-access-lcd2n\") pod \"glance-default-internal-api-0\" (UID: \"2a8c2f28-3cab-44c4-911a-c32162850921\") " pod="openstack/glance-default-internal-api-0" Oct 01 15:54:40 crc kubenswrapper[4869]: I1001 15:54:40.750735 
4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"2a8c2f28-3cab-44c4-911a-c32162850921\") " pod="openstack/glance-default-internal-api-0" Oct 01 15:54:40 crc kubenswrapper[4869]: I1001 15:54:40.750777 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2a8c2f28-3cab-44c4-911a-c32162850921-logs\") pod \"glance-default-internal-api-0\" (UID: \"2a8c2f28-3cab-44c4-911a-c32162850921\") " pod="openstack/glance-default-internal-api-0" Oct 01 15:54:40 crc kubenswrapper[4869]: I1001 15:54:40.750832 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2a8c2f28-3cab-44c4-911a-c32162850921-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"2a8c2f28-3cab-44c4-911a-c32162850921\") " pod="openstack/glance-default-internal-api-0" Oct 01 15:54:40 crc kubenswrapper[4869]: I1001 15:54:40.750883 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/2a8c2f28-3cab-44c4-911a-c32162850921-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"2a8c2f28-3cab-44c4-911a-c32162850921\") " pod="openstack/glance-default-internal-api-0" Oct 01 15:54:40 crc kubenswrapper[4869]: I1001 15:54:40.750914 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/2a8c2f28-3cab-44c4-911a-c32162850921-ceph\") pod \"glance-default-internal-api-0\" (UID: \"2a8c2f28-3cab-44c4-911a-c32162850921\") " pod="openstack/glance-default-internal-api-0" Oct 01 15:54:40 crc kubenswrapper[4869]: I1001 15:54:40.750936 4869 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"2a8c2f28-3cab-44c4-911a-c32162850921\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/glance-default-internal-api-0" Oct 01 15:54:40 crc kubenswrapper[4869]: I1001 15:54:40.750951 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2a8c2f28-3cab-44c4-911a-c32162850921-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"2a8c2f28-3cab-44c4-911a-c32162850921\") " pod="openstack/glance-default-internal-api-0" Oct 01 15:54:40 crc kubenswrapper[4869]: I1001 15:54:40.751410 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2a8c2f28-3cab-44c4-911a-c32162850921-config-data\") pod \"glance-default-internal-api-0\" (UID: \"2a8c2f28-3cab-44c4-911a-c32162850921\") " pod="openstack/glance-default-internal-api-0" Oct 01 15:54:40 crc kubenswrapper[4869]: I1001 15:54:40.751515 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2a8c2f28-3cab-44c4-911a-c32162850921-scripts\") pod \"glance-default-internal-api-0\" (UID: \"2a8c2f28-3cab-44c4-911a-c32162850921\") " pod="openstack/glance-default-internal-api-0" Oct 01 15:54:40 crc kubenswrapper[4869]: I1001 15:54:40.751881 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: 
\"kubernetes.io/empty-dir/2a8c2f28-3cab-44c4-911a-c32162850921-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"2a8c2f28-3cab-44c4-911a-c32162850921\") " pod="openstack/glance-default-internal-api-0" Oct 01 15:54:40 crc kubenswrapper[4869]: I1001 15:54:40.751932 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2a8c2f28-3cab-44c4-911a-c32162850921-logs\") pod \"glance-default-internal-api-0\" (UID: \"2a8c2f28-3cab-44c4-911a-c32162850921\") " pod="openstack/glance-default-internal-api-0" Oct 01 15:54:40 crc kubenswrapper[4869]: I1001 15:54:40.754321 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2a8c2f28-3cab-44c4-911a-c32162850921-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"2a8c2f28-3cab-44c4-911a-c32162850921\") " pod="openstack/glance-default-internal-api-0" Oct 01 15:54:40 crc kubenswrapper[4869]: I1001 15:54:40.755465 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2a8c2f28-3cab-44c4-911a-c32162850921-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"2a8c2f28-3cab-44c4-911a-c32162850921\") " pod="openstack/glance-default-internal-api-0" Oct 01 15:54:40 crc kubenswrapper[4869]: I1001 15:54:40.756403 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2a8c2f28-3cab-44c4-911a-c32162850921-config-data\") pod \"glance-default-internal-api-0\" (UID: \"2a8c2f28-3cab-44c4-911a-c32162850921\") " pod="openstack/glance-default-internal-api-0" Oct 01 15:54:40 crc kubenswrapper[4869]: I1001 15:54:40.759932 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/2a8c2f28-3cab-44c4-911a-c32162850921-ceph\") pod \"glance-default-internal-api-0\" (UID: \"2a8c2f28-3cab-44c4-911a-c32162850921\") " pod="openstack/glance-default-internal-api-0" Oct 01 15:54:40 crc kubenswrapper[4869]: I1001 15:54:40.768082 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lcd2n\" (UniqueName: \"kubernetes.io/projected/2a8c2f28-3cab-44c4-911a-c32162850921-kube-api-access-lcd2n\") pod \"glance-default-internal-api-0\" (UID: \"2a8c2f28-3cab-44c4-911a-c32162850921\") " pod="openstack/glance-default-internal-api-0" Oct 01 15:54:40 crc kubenswrapper[4869]: I1001 15:54:40.770931 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2a8c2f28-3cab-44c4-911a-c32162850921-scripts\") pod \"glance-default-internal-api-0\" (UID: \"2a8c2f28-3cab-44c4-911a-c32162850921\") " pod="openstack/glance-default-internal-api-0" Oct 01 15:54:40 crc kubenswrapper[4869]: I1001 15:54:40.794557 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"2a8c2f28-3cab-44c4-911a-c32162850921\") " pod="openstack/glance-default-internal-api-0" Oct 01 15:54:40 crc kubenswrapper[4869]: I1001 15:54:40.796170 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 01 15:54:40 crc kubenswrapper[4869]: I1001 15:54:40.814599 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-volume1-0" event={"ID":"456f4cbe-3d22-4705-abbe-09cadc1c0ce2","Type":"ContainerStarted","Data":"92c5a9ce7f10b83adfbeebd429303837c8fb2284ebdffff34e553d87a89a7acb"} Oct 01 15:54:40 crc kubenswrapper[4869]: I1001 15:54:40.870935 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-db-create-4vqnt"] Oct 01 15:54:40 crc kubenswrapper[4869]: I1001 15:54:40.889373 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 01 15:54:41 crc kubenswrapper[4869]: I1001 15:54:41.159498 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-backup-0"] Oct 01 15:54:41 crc kubenswrapper[4869]: I1001 15:54:41.384176 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 01 15:54:41 crc kubenswrapper[4869]: W1001 15:54:41.385407 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod734729c7_8311_4c3f_ab6c_55592dfcf7c2.slice/crio-270df365c22fbfc16deee538e53d954da4c1d32b896c702e30fd262878135e64 WatchSource:0}: Error finding container 270df365c22fbfc16deee538e53d954da4c1d32b896c702e30fd262878135e64: Status 404 returned error can't find the container with id 270df365c22fbfc16deee538e53d954da4c1d32b896c702e30fd262878135e64 Oct 01 15:54:41 crc kubenswrapper[4869]: I1001 15:54:41.473850 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 01 15:54:41 crc kubenswrapper[4869]: W1001 15:54:41.477315 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2a8c2f28_3cab_44c4_911a_c32162850921.slice/crio-bbf523be71cede6c11c61334e84af5314203476eb70fb6421f9b98dffd5c1631 WatchSource:0}: Error finding container bbf523be71cede6c11c61334e84af5314203476eb70fb6421f9b98dffd5c1631: Status 404 returned error can't find the container with id bbf523be71cede6c11c61334e84af5314203476eb70fb6421f9b98dffd5c1631 Oct 01 15:54:41 crc kubenswrapper[4869]: I1001 15:54:41.837123 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"734729c7-8311-4c3f-ab6c-55592dfcf7c2","Type":"ContainerStarted","Data":"270df365c22fbfc16deee538e53d954da4c1d32b896c702e30fd262878135e64"} Oct 01 15:54:41 crc kubenswrapper[4869]: I1001 15:54:41.838693 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-backup-0" event={"ID":"cd72163f-65c7-4984-a967-de8f42861de4","Type":"ContainerStarted","Data":"620e813e15b191a129da2ec463af8040dbe86a94c599dc2bcf01a34724f71033"} Oct 01 15:54:41 crc kubenswrapper[4869]: I1001 15:54:41.840049 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"2a8c2f28-3cab-44c4-911a-c32162850921","Type":"ContainerStarted","Data":"bbf523be71cede6c11c61334e84af5314203476eb70fb6421f9b98dffd5c1631"} Oct 01 15:54:41 crc kubenswrapper[4869]: I1001 15:54:41.846605 4869 generic.go:334] "Generic (PLEG): container finished" podID="c44eaba4-c497-45e1-9f35-bb8b579b70fd" containerID="2194ffacdd19e2540036abb8d7fe48806c491d8172647164f1d9fc99b49a8c53" exitCode=0 Oct 01 15:54:41 crc kubenswrapper[4869]: I1001 15:54:41.846643 4869 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-create-4vqnt" event={"ID":"c44eaba4-c497-45e1-9f35-bb8b579b70fd","Type":"ContainerDied","Data":"2194ffacdd19e2540036abb8d7fe48806c491d8172647164f1d9fc99b49a8c53"} Oct 01 15:54:41 crc kubenswrapper[4869]: I1001 15:54:41.846717 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-create-4vqnt" event={"ID":"c44eaba4-c497-45e1-9f35-bb8b579b70fd","Type":"ContainerStarted","Data":"b4a647f79f470db13a9ce81b6e8cbfa2d2b3af158af4d72b3f9de95a13671825"} Oct 01 15:54:42 crc kubenswrapper[4869]: I1001 15:54:42.857290 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"2a8c2f28-3cab-44c4-911a-c32162850921","Type":"ContainerStarted","Data":"7a98882c836b4d48a22300ed61f83ecd718ebc938e989542df0ae67b1c7dd2a4"} Oct 01 15:54:42 crc kubenswrapper[4869]: I1001 15:54:42.857881 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"2a8c2f28-3cab-44c4-911a-c32162850921","Type":"ContainerStarted","Data":"5aa52d53716d3d3c702280daf711e3813a449e4a484ee34f12971857d1566bb8"} Oct 01 15:54:42 crc kubenswrapper[4869]: I1001 15:54:42.860037 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-volume1-0" event={"ID":"456f4cbe-3d22-4705-abbe-09cadc1c0ce2","Type":"ContainerStarted","Data":"738d88f69aefd9464c4af6d0b9a45b7df1c442afecbd31456bca29142fbaa6bf"} Oct 01 15:54:42 crc kubenswrapper[4869]: I1001 15:54:42.860064 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-volume1-0" event={"ID":"456f4cbe-3d22-4705-abbe-09cadc1c0ce2","Type":"ContainerStarted","Data":"4ba93f40541d90ad453c8a89ca2b5e5bfd165c9198713bd247e31332b9dfa7cf"} Oct 01 15:54:42 crc kubenswrapper[4869]: I1001 15:54:42.865978 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"734729c7-8311-4c3f-ab6c-55592dfcf7c2","Type":"ContainerStarted","Data":"2a6131ad73bb681ca6142dc44a1c5509a28759b1912dd65767fa3bf629c253f7"} Oct 01 15:54:42 crc kubenswrapper[4869]: I1001 15:54:42.866018 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"734729c7-8311-4c3f-ab6c-55592dfcf7c2","Type":"ContainerStarted","Data":"8e3b940a37c90def3753820a3eb98562bbae393ed55c724dc0fe8c67a5aa273e"} Oct 01 15:54:42 crc kubenswrapper[4869]: I1001 15:54:42.869586 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-backup-0" event={"ID":"cd72163f-65c7-4984-a967-de8f42861de4","Type":"ContainerStarted","Data":"e6d58486d742bac6b349b05f2c7bddab03ce3ee019adc8b8ede4277a68a0c9da"} Oct 01 15:54:42 crc kubenswrapper[4869]: I1001 15:54:42.869665 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-backup-0" event={"ID":"cd72163f-65c7-4984-a967-de8f42861de4","Type":"ContainerStarted","Data":"d514696ecf50e0eae67e6b2a579fa18fa0f6f30a892dd70f1f66504143bb1ff8"} Oct 01 15:54:42 crc kubenswrapper[4869]: I1001 15:54:42.913703 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=3.913682718 podStartE2EDuration="3.913682718s" podCreationTimestamp="2025-10-01 15:54:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:54:42.884907622 +0000 UTC m=+2992.031750738" 
watchObservedRunningTime="2025-10-01 15:54:42.913682718 +0000 UTC m=+2992.060525834" Oct 01 15:54:42 crc kubenswrapper[4869]: I1001 15:54:42.947479 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-volume-volume1-0" podStartSLOduration=2.706956106 podStartE2EDuration="3.94745357s" podCreationTimestamp="2025-10-01 15:54:39 +0000 UTC" firstStartedPulling="2025-10-01 15:54:40.646640666 +0000 UTC m=+2989.793483782" lastFinishedPulling="2025-10-01 15:54:41.88713813 +0000 UTC m=+2991.033981246" observedRunningTime="2025-10-01 15:54:42.921575217 +0000 UTC m=+2992.068418333" watchObservedRunningTime="2025-10-01 15:54:42.94745357 +0000 UTC m=+2992.094296706" Oct 01 15:54:42 crc kubenswrapper[4869]: I1001 15:54:42.975155 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=3.975135968 podStartE2EDuration="3.975135968s" podCreationTimestamp="2025-10-01 15:54:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:54:42.938527635 +0000 UTC m=+2992.085370771" watchObservedRunningTime="2025-10-01 15:54:42.975135968 +0000 UTC m=+2992.121979084" Oct 01 15:54:43 crc kubenswrapper[4869]: I1001 15:54:43.017473 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-backup-0" podStartSLOduration=2.92631037 podStartE2EDuration="4.017453126s" podCreationTimestamp="2025-10-01 15:54:39 +0000 UTC" firstStartedPulling="2025-10-01 15:54:41.170364579 +0000 UTC m=+2990.317207695" lastFinishedPulling="2025-10-01 15:54:42.261507325 +0000 UTC m=+2991.408350451" observedRunningTime="2025-10-01 15:54:43.008983442 +0000 UTC m=+2992.155826558" watchObservedRunningTime="2025-10-01 15:54:43.017453126 +0000 UTC m=+2992.164296242" Oct 01 15:54:43 crc kubenswrapper[4869]: I1001 15:54:43.327820 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-create-4vqnt" Oct 01 15:54:43 crc kubenswrapper[4869]: I1001 15:54:43.441070 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kxh4v\" (UniqueName: \"kubernetes.io/projected/c44eaba4-c497-45e1-9f35-bb8b579b70fd-kube-api-access-kxh4v\") pod \"c44eaba4-c497-45e1-9f35-bb8b579b70fd\" (UID: \"c44eaba4-c497-45e1-9f35-bb8b579b70fd\") " Oct 01 15:54:43 crc kubenswrapper[4869]: I1001 15:54:43.453338 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c44eaba4-c497-45e1-9f35-bb8b579b70fd-kube-api-access-kxh4v" (OuterVolumeSpecName: "kube-api-access-kxh4v") pod "c44eaba4-c497-45e1-9f35-bb8b579b70fd" (UID: "c44eaba4-c497-45e1-9f35-bb8b579b70fd"). InnerVolumeSpecName "kube-api-access-kxh4v". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:54:43 crc kubenswrapper[4869]: I1001 15:54:43.543696 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kxh4v\" (UniqueName: \"kubernetes.io/projected/c44eaba4-c497-45e1-9f35-bb8b579b70fd-kube-api-access-kxh4v\") on node \"crc\" DevicePath \"\"" Oct 01 15:54:43 crc kubenswrapper[4869]: I1001 15:54:43.888333 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-create-4vqnt" event={"ID":"c44eaba4-c497-45e1-9f35-bb8b579b70fd","Type":"ContainerDied","Data":"b4a647f79f470db13a9ce81b6e8cbfa2d2b3af158af4d72b3f9de95a13671825"} Oct 01 15:54:43 crc kubenswrapper[4869]: I1001 15:54:43.888379 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b4a647f79f470db13a9ce81b6e8cbfa2d2b3af158af4d72b3f9de95a13671825" Oct 01 15:54:43 crc kubenswrapper[4869]: I1001 15:54:43.888436 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-create-4vqnt" Oct 01 15:54:44 crc kubenswrapper[4869]: I1001 15:54:44.949611 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-backup-0" Oct 01 15:54:44 crc kubenswrapper[4869]: I1001 15:54:44.960087 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-volume-volume1-0" Oct 01 15:54:50 crc kubenswrapper[4869]: I1001 15:54:50.143684 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-backup-0" Oct 01 15:54:50 crc kubenswrapper[4869]: I1001 15:54:50.150606 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-volume-volume1-0" Oct 01 15:54:50 crc kubenswrapper[4869]: I1001 15:54:50.320481 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-fc6c-account-create-vdtbr"] Oct 01 15:54:50 crc kubenswrapper[4869]: E1001 15:54:50.321835 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c44eaba4-c497-45e1-9f35-bb8b579b70fd" containerName="mariadb-database-create" Oct 01 15:54:50 crc kubenswrapper[4869]: I1001 15:54:50.322046 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="c44eaba4-c497-45e1-9f35-bb8b579b70fd" containerName="mariadb-database-create" Oct 01 15:54:50 crc kubenswrapper[4869]: I1001 15:54:50.322642 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="c44eaba4-c497-45e1-9f35-bb8b579b70fd" containerName="mariadb-database-create" Oct 01 15:54:50 crc kubenswrapper[4869]: I1001 15:54:50.323873 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-fc6c-account-create-vdtbr" Oct 01 15:54:50 crc kubenswrapper[4869]: I1001 15:54:50.326155 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-db-secret" Oct 01 15:54:50 crc kubenswrapper[4869]: I1001 15:54:50.329639 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-fc6c-account-create-vdtbr"] Oct 01 15:54:50 crc kubenswrapper[4869]: I1001 15:54:50.497309 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xqkmz\" (UniqueName: \"kubernetes.io/projected/73c04986-4ca1-4de0-b432-6d29a0f0b8f9-kube-api-access-xqkmz\") pod \"manila-fc6c-account-create-vdtbr\" (UID: \"73c04986-4ca1-4de0-b432-6d29a0f0b8f9\") " pod="openstack/manila-fc6c-account-create-vdtbr" Oct 01 15:54:50 crc kubenswrapper[4869]: I1001 15:54:50.599631 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xqkmz\" (UniqueName: \"kubernetes.io/projected/73c04986-4ca1-4de0-b432-6d29a0f0b8f9-kube-api-access-xqkmz\") pod \"manila-fc6c-account-create-vdtbr\" (UID: \"73c04986-4ca1-4de0-b432-6d29a0f0b8f9\") " pod="openstack/manila-fc6c-account-create-vdtbr" Oct 01 15:54:50 crc kubenswrapper[4869]: I1001 15:54:50.622380 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xqkmz\" (UniqueName: \"kubernetes.io/projected/73c04986-4ca1-4de0-b432-6d29a0f0b8f9-kube-api-access-xqkmz\") pod \"manila-fc6c-account-create-vdtbr\" (UID: \"73c04986-4ca1-4de0-b432-6d29a0f0b8f9\") " pod="openstack/manila-fc6c-account-create-vdtbr" Oct 01 15:54:50 crc kubenswrapper[4869]: I1001 15:54:50.653224 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-fc6c-account-create-vdtbr" Oct 01 15:54:50 crc kubenswrapper[4869]: I1001 15:54:50.796997 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Oct 01 15:54:50 crc kubenswrapper[4869]: I1001 15:54:50.798463 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Oct 01 15:54:50 crc kubenswrapper[4869]: I1001 15:54:50.887750 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Oct 01 15:54:50 crc kubenswrapper[4869]: I1001 15:54:50.892534 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Oct 01 15:54:50 crc kubenswrapper[4869]: I1001 15:54:50.892571 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Oct 01 15:54:50 crc kubenswrapper[4869]: I1001 15:54:50.905469 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Oct 01 15:54:50 crc kubenswrapper[4869]: I1001 15:54:50.990155 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Oct 01 15:54:50 crc kubenswrapper[4869]: I1001 15:54:50.990180 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Oct 01 15:54:51 crc kubenswrapper[4869]: I1001 15:54:51.040803 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Oct 01 15:54:51 crc kubenswrapper[4869]: I1001 15:54:51.041900 4869 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Oct 01 15:54:51 crc kubenswrapper[4869]: I1001 15:54:51.061061 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Oct 01 15:54:51 crc kubenswrapper[4869]: I1001 15:54:51.301521 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-fc6c-account-create-vdtbr"] Oct 01 15:54:52 crc kubenswrapper[4869]: I1001 15:54:52.007754 4869 generic.go:334] "Generic (PLEG): container finished" podID="73c04986-4ca1-4de0-b432-6d29a0f0b8f9" containerID="6db8090beb2d0a726b31cc9798a1bf965d66a1d0357cdb88f12938f9e0f0f3d2" exitCode=0 Oct 01 15:54:52 crc kubenswrapper[4869]: I1001 15:54:52.007894 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-fc6c-account-create-vdtbr" event={"ID":"73c04986-4ca1-4de0-b432-6d29a0f0b8f9","Type":"ContainerDied","Data":"6db8090beb2d0a726b31cc9798a1bf965d66a1d0357cdb88f12938f9e0f0f3d2"} Oct 01 15:54:52 crc kubenswrapper[4869]: I1001 15:54:52.008285 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-fc6c-account-create-vdtbr" event={"ID":"73c04986-4ca1-4de0-b432-6d29a0f0b8f9","Type":"ContainerStarted","Data":"adba4964a6c316f4c2ce5ec85c36a5efea0d54492697ff3ee4e271daed61addf"} Oct 01 15:54:52 crc kubenswrapper[4869]: I1001 15:54:52.009311 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Oct 01 15:54:53 crc kubenswrapper[4869]: I1001 15:54:53.017302 4869 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 01 15:54:53 crc kubenswrapper[4869]: I1001 15:54:53.017707 4869 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 01 15:54:53 crc kubenswrapper[4869]: I1001 15:54:53.017975 4869 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 01 15:54:53 crc kubenswrapper[4869]: I1001 15:54:53.289157 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Oct 01 15:54:53 crc kubenswrapper[4869]: I1001 15:54:53.289567 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Oct 01 15:54:53 crc kubenswrapper[4869]: I1001 15:54:53.336460 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Oct 01 15:54:53 crc kubenswrapper[4869]: I1001 15:54:53.406553 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-fc6c-account-create-vdtbr" Oct 01 15:54:53 crc kubenswrapper[4869]: I1001 15:54:53.468233 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xqkmz\" (UniqueName: \"kubernetes.io/projected/73c04986-4ca1-4de0-b432-6d29a0f0b8f9-kube-api-access-xqkmz\") pod \"73c04986-4ca1-4de0-b432-6d29a0f0b8f9\" (UID: \"73c04986-4ca1-4de0-b432-6d29a0f0b8f9\") " Oct 01 15:54:53 crc kubenswrapper[4869]: I1001 15:54:53.484249 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/73c04986-4ca1-4de0-b432-6d29a0f0b8f9-kube-api-access-xqkmz" (OuterVolumeSpecName: "kube-api-access-xqkmz") pod "73c04986-4ca1-4de0-b432-6d29a0f0b8f9" (UID: "73c04986-4ca1-4de0-b432-6d29a0f0b8f9"). InnerVolumeSpecName "kube-api-access-xqkmz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:54:53 crc kubenswrapper[4869]: I1001 15:54:53.569916 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xqkmz\" (UniqueName: \"kubernetes.io/projected/73c04986-4ca1-4de0-b432-6d29a0f0b8f9-kube-api-access-xqkmz\") on node \"crc\" DevicePath \"\"" Oct 01 15:54:54 crc kubenswrapper[4869]: I1001 15:54:54.025829 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-fc6c-account-create-vdtbr" event={"ID":"73c04986-4ca1-4de0-b432-6d29a0f0b8f9","Type":"ContainerDied","Data":"adba4964a6c316f4c2ce5ec85c36a5efea0d54492697ff3ee4e271daed61addf"} Oct 01 15:54:54 crc kubenswrapper[4869]: I1001 15:54:54.025864 4869 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 01 15:54:54 crc kubenswrapper[4869]: I1001 15:54:54.025869 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="adba4964a6c316f4c2ce5ec85c36a5efea0d54492697ff3ee4e271daed61addf" Oct 01 15:54:54 crc kubenswrapper[4869]: I1001 15:54:54.025924 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-fc6c-account-create-vdtbr" Oct 01 15:54:54 crc kubenswrapper[4869]: I1001 15:54:54.066605 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Oct 01 15:54:54 crc kubenswrapper[4869]: I1001 15:54:54.581407 4869 scope.go:117] "RemoveContainer" containerID="ea87da209cc47118cc41bdb3107264d5dd2e07bc832e620553f0ac1be9456693" Oct 01 15:54:54 crc kubenswrapper[4869]: E1001 15:54:54.582067 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 15:54:55 crc kubenswrapper[4869]: I1001 15:54:55.561121 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-db-sync-mlfvt"] Oct 01 15:54:55 crc kubenswrapper[4869]: E1001 15:54:55.561980 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="73c04986-4ca1-4de0-b432-6d29a0f0b8f9" containerName="mariadb-account-create" Oct 01 15:54:55 crc kubenswrapper[4869]: I1001 15:54:55.562001 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="73c04986-4ca1-4de0-b432-6d29a0f0b8f9" containerName="mariadb-account-create" Oct 01 15:54:55 crc kubenswrapper[4869]: I1001 15:54:55.562242 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="73c04986-4ca1-4de0-b432-6d29a0f0b8f9" containerName="mariadb-account-create" Oct 01 15:54:55 crc kubenswrapper[4869]: I1001 15:54:55.563013 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-db-sync-mlfvt" Oct 01 15:54:55 crc kubenswrapper[4869]: I1001 15:54:55.565646 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-config-data" Oct 01 15:54:55 crc kubenswrapper[4869]: I1001 15:54:55.570504 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-db-sync-mlfvt"] Oct 01 15:54:55 crc kubenswrapper[4869]: I1001 15:54:55.574175 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-manila-dockercfg-qst2g" Oct 01 15:54:55 crc kubenswrapper[4869]: I1001 15:54:55.711036 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ffc8a9b9-ff6f-4d41-b65e-9b57aebf660e-combined-ca-bundle\") pod \"manila-db-sync-mlfvt\" (UID: \"ffc8a9b9-ff6f-4d41-b65e-9b57aebf660e\") " pod="openstack/manila-db-sync-mlfvt" Oct 01 15:54:55 crc kubenswrapper[4869]: I1001 15:54:55.711124 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mfjh6\" (UniqueName: \"kubernetes.io/projected/ffc8a9b9-ff6f-4d41-b65e-9b57aebf660e-kube-api-access-mfjh6\") pod \"manila-db-sync-mlfvt\" (UID: \"ffc8a9b9-ff6f-4d41-b65e-9b57aebf660e\") " pod="openstack/manila-db-sync-mlfvt" Oct 01 15:54:55 crc kubenswrapper[4869]: I1001 15:54:55.711170 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/ffc8a9b9-ff6f-4d41-b65e-9b57aebf660e-job-config-data\") pod \"manila-db-sync-mlfvt\" (UID: \"ffc8a9b9-ff6f-4d41-b65e-9b57aebf660e\") " pod="openstack/manila-db-sync-mlfvt" Oct 01 15:54:55 crc kubenswrapper[4869]: I1001 15:54:55.711339 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ffc8a9b9-ff6f-4d41-b65e-9b57aebf660e-config-data\") pod \"manila-db-sync-mlfvt\" (UID: \"ffc8a9b9-ff6f-4d41-b65e-9b57aebf660e\") " pod="openstack/manila-db-sync-mlfvt" Oct 01 15:54:55 crc kubenswrapper[4869]: I1001 15:54:55.812754 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mfjh6\" (UniqueName: \"kubernetes.io/projected/ffc8a9b9-ff6f-4d41-b65e-9b57aebf660e-kube-api-access-mfjh6\") pod \"manila-db-sync-mlfvt\" (UID: \"ffc8a9b9-ff6f-4d41-b65e-9b57aebf660e\") " pod="openstack/manila-db-sync-mlfvt" Oct 01 15:54:55 crc kubenswrapper[4869]: I1001 15:54:55.812816 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/ffc8a9b9-ff6f-4d41-b65e-9b57aebf660e-job-config-data\") pod \"manila-db-sync-mlfvt\" (UID: \"ffc8a9b9-ff6f-4d41-b65e-9b57aebf660e\") " pod="openstack/manila-db-sync-mlfvt" Oct 01 15:54:55 crc kubenswrapper[4869]: I1001 15:54:55.812937 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ffc8a9b9-ff6f-4d41-b65e-9b57aebf660e-config-data\") pod \"manila-db-sync-mlfvt\" (UID: \"ffc8a9b9-ff6f-4d41-b65e-9b57aebf660e\") " pod="openstack/manila-db-sync-mlfvt" Oct 01 15:54:55 crc kubenswrapper[4869]: I1001 15:54:55.813000 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ffc8a9b9-ff6f-4d41-b65e-9b57aebf660e-combined-ca-bundle\") pod \"manila-db-sync-mlfvt\" (UID: 
\"ffc8a9b9-ff6f-4d41-b65e-9b57aebf660e\") " pod="openstack/manila-db-sync-mlfvt" Oct 01 15:54:55 crc kubenswrapper[4869]: I1001 15:54:55.820693 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ffc8a9b9-ff6f-4d41-b65e-9b57aebf660e-combined-ca-bundle\") pod \"manila-db-sync-mlfvt\" (UID: \"ffc8a9b9-ff6f-4d41-b65e-9b57aebf660e\") " pod="openstack/manila-db-sync-mlfvt" Oct 01 15:54:55 crc kubenswrapper[4869]: I1001 15:54:55.821630 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ffc8a9b9-ff6f-4d41-b65e-9b57aebf660e-config-data\") pod \"manila-db-sync-mlfvt\" (UID: \"ffc8a9b9-ff6f-4d41-b65e-9b57aebf660e\") " pod="openstack/manila-db-sync-mlfvt" Oct 01 15:54:55 crc kubenswrapper[4869]: I1001 15:54:55.830378 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mfjh6\" (UniqueName: \"kubernetes.io/projected/ffc8a9b9-ff6f-4d41-b65e-9b57aebf660e-kube-api-access-mfjh6\") pod \"manila-db-sync-mlfvt\" (UID: \"ffc8a9b9-ff6f-4d41-b65e-9b57aebf660e\") " pod="openstack/manila-db-sync-mlfvt" Oct 01 15:54:55 crc kubenswrapper[4869]: I1001 15:54:55.832546 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/ffc8a9b9-ff6f-4d41-b65e-9b57aebf660e-job-config-data\") pod \"manila-db-sync-mlfvt\" (UID: \"ffc8a9b9-ff6f-4d41-b65e-9b57aebf660e\") " pod="openstack/manila-db-sync-mlfvt" Oct 01 15:54:55 crc kubenswrapper[4869]: I1001 15:54:55.892800 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-sync-mlfvt" Oct 01 15:54:56 crc kubenswrapper[4869]: I1001 15:54:56.470598 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-db-sync-mlfvt"] Oct 01 15:54:57 crc kubenswrapper[4869]: I1001 15:54:57.058334 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-sync-mlfvt" event={"ID":"ffc8a9b9-ff6f-4d41-b65e-9b57aebf660e","Type":"ContainerStarted","Data":"c3c4df6461ee5ce4f16e335cebd62a232e1f6d7081683cdd8c8409ea87a4e54d"} Oct 01 15:55:01 crc kubenswrapper[4869]: I1001 15:55:01.096372 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-sync-mlfvt" event={"ID":"ffc8a9b9-ff6f-4d41-b65e-9b57aebf660e","Type":"ContainerStarted","Data":"9a6ff92c736b56b7e0c8e4755710c6604d708a1425dd8a03279af01354109637"} Oct 01 15:55:01 crc kubenswrapper[4869]: I1001 15:55:01.117729 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-db-sync-mlfvt" podStartSLOduration=2.238599566 podStartE2EDuration="6.117704947s" podCreationTimestamp="2025-10-01 15:54:55 +0000 UTC" firstStartedPulling="2025-10-01 15:54:56.477591097 +0000 UTC m=+3005.624434213" lastFinishedPulling="2025-10-01 15:55:00.356696438 +0000 UTC m=+3009.503539594" observedRunningTime="2025-10-01 15:55:01.115883401 +0000 UTC m=+3010.262726537" watchObservedRunningTime="2025-10-01 15:55:01.117704947 +0000 UTC m=+3010.264548093" Oct 01 15:55:07 crc kubenswrapper[4869]: I1001 15:55:07.581953 4869 scope.go:117] "RemoveContainer" containerID="ea87da209cc47118cc41bdb3107264d5dd2e07bc832e620553f0ac1be9456693" Oct 01 15:55:07 crc kubenswrapper[4869]: E1001 15:55:07.583213 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 15:55:21 crc kubenswrapper[4869]: I1001 15:55:21.316832 4869 generic.go:334] "Generic (PLEG): container finished" podID="ffc8a9b9-ff6f-4d41-b65e-9b57aebf660e" containerID="9a6ff92c736b56b7e0c8e4755710c6604d708a1425dd8a03279af01354109637" exitCode=0 Oct 01 15:55:21 crc kubenswrapper[4869]: I1001 15:55:21.316968 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-sync-mlfvt" event={"ID":"ffc8a9b9-ff6f-4d41-b65e-9b57aebf660e","Type":"ContainerDied","Data":"9a6ff92c736b56b7e0c8e4755710c6604d708a1425dd8a03279af01354109637"} Oct 01 15:55:21 crc kubenswrapper[4869]: I1001 15:55:21.596113 4869 scope.go:117] "RemoveContainer" containerID="ea87da209cc47118cc41bdb3107264d5dd2e07bc832e620553f0ac1be9456693" Oct 01 15:55:21 crc kubenswrapper[4869]: E1001 15:55:21.596898 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 15:55:22 crc kubenswrapper[4869]: I1001 15:55:22.765861 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-sync-mlfvt" Oct 01 15:55:22 crc kubenswrapper[4869]: I1001 15:55:22.926783 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ffc8a9b9-ff6f-4d41-b65e-9b57aebf660e-config-data\") pod \"ffc8a9b9-ff6f-4d41-b65e-9b57aebf660e\" (UID: \"ffc8a9b9-ff6f-4d41-b65e-9b57aebf660e\") " Oct 01 15:55:22 crc kubenswrapper[4869]: I1001 15:55:22.926922 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mfjh6\" (UniqueName: \"kubernetes.io/projected/ffc8a9b9-ff6f-4d41-b65e-9b57aebf660e-kube-api-access-mfjh6\") pod \"ffc8a9b9-ff6f-4d41-b65e-9b57aebf660e\" (UID: \"ffc8a9b9-ff6f-4d41-b65e-9b57aebf660e\") " Oct 01 15:55:22 crc kubenswrapper[4869]: I1001 15:55:22.927876 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/ffc8a9b9-ff6f-4d41-b65e-9b57aebf660e-job-config-data\") pod \"ffc8a9b9-ff6f-4d41-b65e-9b57aebf660e\" (UID: \"ffc8a9b9-ff6f-4d41-b65e-9b57aebf660e\") " Oct 01 15:55:22 crc kubenswrapper[4869]: I1001 15:55:22.927965 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ffc8a9b9-ff6f-4d41-b65e-9b57aebf660e-combined-ca-bundle\") pod \"ffc8a9b9-ff6f-4d41-b65e-9b57aebf660e\" (UID: \"ffc8a9b9-ff6f-4d41-b65e-9b57aebf660e\") " Oct 01 15:55:22 crc kubenswrapper[4869]: I1001 15:55:22.934153 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ffc8a9b9-ff6f-4d41-b65e-9b57aebf660e-kube-api-access-mfjh6" (OuterVolumeSpecName: "kube-api-access-mfjh6") pod "ffc8a9b9-ff6f-4d41-b65e-9b57aebf660e" (UID: "ffc8a9b9-ff6f-4d41-b65e-9b57aebf660e"). InnerVolumeSpecName "kube-api-access-mfjh6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:55:22 crc kubenswrapper[4869]: I1001 15:55:22.934906 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ffc8a9b9-ff6f-4d41-b65e-9b57aebf660e-job-config-data" (OuterVolumeSpecName: "job-config-data") pod "ffc8a9b9-ff6f-4d41-b65e-9b57aebf660e" (UID: "ffc8a9b9-ff6f-4d41-b65e-9b57aebf660e"). InnerVolumeSpecName "job-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:55:22 crc kubenswrapper[4869]: I1001 15:55:22.941221 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ffc8a9b9-ff6f-4d41-b65e-9b57aebf660e-config-data" (OuterVolumeSpecName: "config-data") pod "ffc8a9b9-ff6f-4d41-b65e-9b57aebf660e" (UID: "ffc8a9b9-ff6f-4d41-b65e-9b57aebf660e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:55:22 crc kubenswrapper[4869]: I1001 15:55:22.971016 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ffc8a9b9-ff6f-4d41-b65e-9b57aebf660e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ffc8a9b9-ff6f-4d41-b65e-9b57aebf660e" (UID: "ffc8a9b9-ff6f-4d41-b65e-9b57aebf660e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:55:23 crc kubenswrapper[4869]: I1001 15:55:23.029977 4869 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ffc8a9b9-ff6f-4d41-b65e-9b57aebf660e-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 15:55:23 crc kubenswrapper[4869]: I1001 15:55:23.030022 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mfjh6\" (UniqueName: \"kubernetes.io/projected/ffc8a9b9-ff6f-4d41-b65e-9b57aebf660e-kube-api-access-mfjh6\") on node \"crc\" DevicePath \"\"" Oct 01 15:55:23 crc kubenswrapper[4869]: I1001 15:55:23.030035 4869 reconciler_common.go:293] "Volume detached for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/ffc8a9b9-ff6f-4d41-b65e-9b57aebf660e-job-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 15:55:23 crc kubenswrapper[4869]: I1001 15:55:23.030051 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ffc8a9b9-ff6f-4d41-b65e-9b57aebf660e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 15:55:23 crc kubenswrapper[4869]: I1001 15:55:23.336761 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-sync-mlfvt" event={"ID":"ffc8a9b9-ff6f-4d41-b65e-9b57aebf660e","Type":"ContainerDied","Data":"c3c4df6461ee5ce4f16e335cebd62a232e1f6d7081683cdd8c8409ea87a4e54d"} Oct 01 15:55:23 crc kubenswrapper[4869]: I1001 15:55:23.337087 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c3c4df6461ee5ce4f16e335cebd62a232e1f6d7081683cdd8c8409ea87a4e54d" Oct 01 15:55:23 crc kubenswrapper[4869]: I1001 15:55:23.336824 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-db-sync-mlfvt" Oct 01 15:55:23 crc kubenswrapper[4869]: I1001 15:55:23.730515 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-share-share1-0"] Oct 01 15:55:23 crc kubenswrapper[4869]: E1001 15:55:23.731156 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ffc8a9b9-ff6f-4d41-b65e-9b57aebf660e" containerName="manila-db-sync" Oct 01 15:55:23 crc kubenswrapper[4869]: I1001 15:55:23.731192 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="ffc8a9b9-ff6f-4d41-b65e-9b57aebf660e" containerName="manila-db-sync" Oct 01 15:55:23 crc kubenswrapper[4869]: I1001 15:55:23.731580 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="ffc8a9b9-ff6f-4d41-b65e-9b57aebf660e" containerName="manila-db-sync" Oct 01 15:55:23 crc kubenswrapper[4869]: I1001 15:55:23.733213 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-share-share1-0" Oct 01 15:55:23 crc kubenswrapper[4869]: I1001 15:55:23.734829 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-manila-dockercfg-qst2g" Oct 01 15:55:23 crc kubenswrapper[4869]: I1001 15:55:23.735210 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-share-share1-config-data" Oct 01 15:55:23 crc kubenswrapper[4869]: I1001 15:55:23.735910 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-scripts" Oct 01 15:55:23 crc kubenswrapper[4869]: I1001 15:55:23.736115 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-config-data" Oct 01 15:55:23 crc kubenswrapper[4869]: I1001 15:55:23.746437 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-scheduler-0"] Oct 01 15:55:23 crc kubenswrapper[4869]: I1001 15:55:23.748146 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-scheduler-0" Oct 01 15:55:23 crc kubenswrapper[4869]: I1001 15:55:23.754080 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-scheduler-config-data" Oct 01 15:55:23 crc kubenswrapper[4869]: I1001 15:55:23.772151 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-share-share1-0"] Oct 01 15:55:23 crc kubenswrapper[4869]: I1001 15:55:23.788759 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-scheduler-0"] Oct 01 15:55:23 crc kubenswrapper[4869]: I1001 15:55:23.844252 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kk9hz\" (UniqueName: \"kubernetes.io/projected/ad1db0ac-1359-46bf-b6d6-6ae089968105-kube-api-access-kk9hz\") pod \"manila-scheduler-0\" (UID: \"ad1db0ac-1359-46bf-b6d6-6ae089968105\") " pod="openstack/manila-scheduler-0" Oct 01 15:55:23 crc kubenswrapper[4869]: I1001 15:55:23.844340 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j64zd\" (UniqueName: \"kubernetes.io/projected/d413715e-3ed1-4ea2-8b2f-0a56e1c2677e-kube-api-access-j64zd\") pod \"manila-share-share1-0\" (UID: \"d413715e-3ed1-4ea2-8b2f-0a56e1c2677e\") " pod="openstack/manila-share-share1-0" Oct 01 15:55:23 crc kubenswrapper[4869]: I1001 15:55:23.844371 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d413715e-3ed1-4ea2-8b2f-0a56e1c2677e-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"d413715e-3ed1-4ea2-8b2f-0a56e1c2677e\") " pod="openstack/manila-share-share1-0" Oct 01 15:55:23 crc kubenswrapper[4869]: I1001 15:55:23.844401 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ad1db0ac-1359-46bf-b6d6-6ae089968105-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"ad1db0ac-1359-46bf-b6d6-6ae089968105\") " pod="openstack/manila-scheduler-0" Oct 01 15:55:23 crc kubenswrapper[4869]: I1001 15:55:23.844427 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ad1db0ac-1359-46bf-b6d6-6ae089968105-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"ad1db0ac-1359-46bf-b6d6-6ae089968105\") " pod="openstack/manila-scheduler-0" Oct 01 15:55:23 crc kubenswrapper[4869]: I1001 15:55:23.844461 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/d413715e-3ed1-4ea2-8b2f-0a56e1c2677e-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"d413715e-3ed1-4ea2-8b2f-0a56e1c2677e\") " pod="openstack/manila-share-share1-0" Oct 01 15:55:23 crc kubenswrapper[4869]: I1001 15:55:23.844476 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d413715e-3ed1-4ea2-8b2f-0a56e1c2677e-config-data\") pod \"manila-share-share1-0\" (UID: \"d413715e-3ed1-4ea2-8b2f-0a56e1c2677e\") " pod="openstack/manila-share-share1-0" Oct 01 15:55:23 crc kubenswrapper[4869]: I1001 15:55:23.844492 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/ad1db0ac-1359-46bf-b6d6-6ae089968105-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"ad1db0ac-1359-46bf-b6d6-6ae089968105\") " pod="openstack/manila-scheduler-0" Oct 01 15:55:23 crc kubenswrapper[4869]: I1001 15:55:23.844509 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/d413715e-3ed1-4ea2-8b2f-0a56e1c2677e-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"d413715e-3ed1-4ea2-8b2f-0a56e1c2677e\") " pod="openstack/manila-share-share1-0" Oct 01 15:55:23 crc kubenswrapper[4869]: I1001 15:55:23.844527 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ad1db0ac-1359-46bf-b6d6-6ae089968105-scripts\") pod \"manila-scheduler-0\" (UID: \"ad1db0ac-1359-46bf-b6d6-6ae089968105\") " pod="openstack/manila-scheduler-0" Oct 01 15:55:23 crc kubenswrapper[4869]: I1001 15:55:23.844543 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d413715e-3ed1-4ea2-8b2f-0a56e1c2677e-scripts\") pod \"manila-share-share1-0\" (UID: \"d413715e-3ed1-4ea2-8b2f-0a56e1c2677e\") " pod="openstack/manila-share-share1-0" Oct 01 15:55:23 crc kubenswrapper[4869]: I1001 15:55:23.844572 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/d413715e-3ed1-4ea2-8b2f-0a56e1c2677e-ceph\") pod \"manila-share-share1-0\" (UID: \"d413715e-3ed1-4ea2-8b2f-0a56e1c2677e\") " pod="openstack/manila-share-share1-0" Oct 01 15:55:23 crc kubenswrapper[4869]: I1001 15:55:23.844611 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ad1db0ac-1359-46bf-b6d6-6ae089968105-config-data\") pod \"manila-scheduler-0\" (UID: \"ad1db0ac-1359-46bf-b6d6-6ae089968105\") " pod="openstack/manila-scheduler-0" Oct 01 15:55:23 crc kubenswrapper[4869]: I1001 15:55:23.844630 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d413715e-3ed1-4ea2-8b2f-0a56e1c2677e-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"d413715e-3ed1-4ea2-8b2f-0a56e1c2677e\") " pod="openstack/manila-share-share1-0" Oct 01 15:55:23 crc kubenswrapper[4869]: I1001 15:55:23.894408 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5b88556f9c-4f47f"] Oct 01 15:55:23 crc kubenswrapper[4869]: I1001 15:55:23.896101 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5b88556f9c-4f47f" Oct 01 15:55:23 crc kubenswrapper[4869]: I1001 15:55:23.912945 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5b88556f9c-4f47f"] Oct 01 15:55:23 crc kubenswrapper[4869]: I1001 15:55:23.946484 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/d413715e-3ed1-4ea2-8b2f-0a56e1c2677e-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"d413715e-3ed1-4ea2-8b2f-0a56e1c2677e\") " pod="openstack/manila-share-share1-0" Oct 01 15:55:23 crc kubenswrapper[4869]: I1001 15:55:23.946539 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d413715e-3ed1-4ea2-8b2f-0a56e1c2677e-config-data\") pod \"manila-share-share1-0\" (UID: \"d413715e-3ed1-4ea2-8b2f-0a56e1c2677e\") " pod="openstack/manila-share-share1-0" Oct 01 15:55:23 crc kubenswrapper[4869]: I1001 15:55:23.946555 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad1db0ac-1359-46bf-b6d6-6ae089968105-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"ad1db0ac-1359-46bf-b6d6-6ae089968105\") " pod="openstack/manila-scheduler-0" Oct 01 15:55:23 crc kubenswrapper[4869]: I1001 15:55:23.946584 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/d413715e-3ed1-4ea2-8b2f-0a56e1c2677e-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"d413715e-3ed1-4ea2-8b2f-0a56e1c2677e\") " pod="openstack/manila-share-share1-0" Oct 01 15:55:23 crc kubenswrapper[4869]: I1001 15:55:23.946611 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ad1db0ac-1359-46bf-b6d6-6ae089968105-scripts\") pod \"manila-scheduler-0\" (UID: \"ad1db0ac-1359-46bf-b6d6-6ae089968105\") " pod="openstack/manila-scheduler-0" Oct 01 15:55:23 crc kubenswrapper[4869]: I1001 15:55:23.946628 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d413715e-3ed1-4ea2-8b2f-0a56e1c2677e-scripts\") pod \"manila-share-share1-0\" (UID: \"d413715e-3ed1-4ea2-8b2f-0a56e1c2677e\") " pod="openstack/manila-share-share1-0" Oct 01 15:55:23 crc kubenswrapper[4869]: I1001 15:55:23.946689 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/d413715e-3ed1-4ea2-8b2f-0a56e1c2677e-ceph\") pod \"manila-share-share1-0\" (UID: \"d413715e-3ed1-4ea2-8b2f-0a56e1c2677e\") " pod="openstack/manila-share-share1-0" Oct 01 15:55:23 crc kubenswrapper[4869]: I1001 15:55:23.946760 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ad1db0ac-1359-46bf-b6d6-6ae089968105-config-data\") pod \"manila-scheduler-0\" (UID: \"ad1db0ac-1359-46bf-b6d6-6ae089968105\") " pod="openstack/manila-scheduler-0" Oct 01 15:55:23 crc kubenswrapper[4869]: I1001 15:55:23.946779 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d413715e-3ed1-4ea2-8b2f-0a56e1c2677e-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"d413715e-3ed1-4ea2-8b2f-0a56e1c2677e\") " pod="openstack/manila-share-share1-0" Oct 01 15:55:23 crc 
kubenswrapper[4869]: I1001 15:55:23.946806 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kk9hz\" (UniqueName: \"kubernetes.io/projected/ad1db0ac-1359-46bf-b6d6-6ae089968105-kube-api-access-kk9hz\") pod \"manila-scheduler-0\" (UID: \"ad1db0ac-1359-46bf-b6d6-6ae089968105\") " pod="openstack/manila-scheduler-0" Oct 01 15:55:23 crc kubenswrapper[4869]: I1001 15:55:23.946869 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j64zd\" (UniqueName: \"kubernetes.io/projected/d413715e-3ed1-4ea2-8b2f-0a56e1c2677e-kube-api-access-j64zd\") pod \"manila-share-share1-0\" (UID: \"d413715e-3ed1-4ea2-8b2f-0a56e1c2677e\") " pod="openstack/manila-share-share1-0" Oct 01 15:55:23 crc kubenswrapper[4869]: I1001 15:55:23.946910 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d413715e-3ed1-4ea2-8b2f-0a56e1c2677e-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"d413715e-3ed1-4ea2-8b2f-0a56e1c2677e\") " pod="openstack/manila-share-share1-0" Oct 01 15:55:23 crc kubenswrapper[4869]: I1001 15:55:23.946938 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ad1db0ac-1359-46bf-b6d6-6ae089968105-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"ad1db0ac-1359-46bf-b6d6-6ae089968105\") " pod="openstack/manila-scheduler-0" Oct 01 15:55:23 crc kubenswrapper[4869]: I1001 15:55:23.946976 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ad1db0ac-1359-46bf-b6d6-6ae089968105-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"ad1db0ac-1359-46bf-b6d6-6ae089968105\") " pod="openstack/manila-scheduler-0" Oct 01 15:55:23 crc kubenswrapper[4869]: I1001 15:55:23.947067 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ad1db0ac-1359-46bf-b6d6-6ae089968105-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"ad1db0ac-1359-46bf-b6d6-6ae089968105\") " pod="openstack/manila-scheduler-0" Oct 01 15:55:23 crc kubenswrapper[4869]: I1001 15:55:23.947241 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/d413715e-3ed1-4ea2-8b2f-0a56e1c2677e-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"d413715e-3ed1-4ea2-8b2f-0a56e1c2677e\") " pod="openstack/manila-share-share1-0" Oct 01 15:55:23 crc kubenswrapper[4869]: I1001 15:55:23.955436 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/d413715e-3ed1-4ea2-8b2f-0a56e1c2677e-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"d413715e-3ed1-4ea2-8b2f-0a56e1c2677e\") " pod="openstack/manila-share-share1-0" Oct 01 15:55:23 crc kubenswrapper[4869]: I1001 15:55:23.956724 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d413715e-3ed1-4ea2-8b2f-0a56e1c2677e-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"d413715e-3ed1-4ea2-8b2f-0a56e1c2677e\") " pod="openstack/manila-share-share1-0" Oct 01 15:55:23 crc kubenswrapper[4869]: I1001 15:55:23.957828 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/d413715e-3ed1-4ea2-8b2f-0a56e1c2677e-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"d413715e-3ed1-4ea2-8b2f-0a56e1c2677e\") " pod="openstack/manila-share-share1-0" Oct 01 15:55:23 crc kubenswrapper[4869]: I1001 15:55:23.959278 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d413715e-3ed1-4ea2-8b2f-0a56e1c2677e-config-data\") pod \"manila-share-share1-0\" (UID: \"d413715e-3ed1-4ea2-8b2f-0a56e1c2677e\") " pod="openstack/manila-share-share1-0" Oct 01 15:55:23 crc kubenswrapper[4869]: I1001 15:55:23.962783 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ad1db0ac-1359-46bf-b6d6-6ae089968105-scripts\") pod \"manila-scheduler-0\" (UID: \"ad1db0ac-1359-46bf-b6d6-6ae089968105\") " pod="openstack/manila-scheduler-0" Oct 01 15:55:23 crc kubenswrapper[4869]: I1001 15:55:23.977524 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ad1db0ac-1359-46bf-b6d6-6ae089968105-config-data\") pod \"manila-scheduler-0\" (UID: \"ad1db0ac-1359-46bf-b6d6-6ae089968105\") " pod="openstack/manila-scheduler-0" Oct 01 15:55:23 crc kubenswrapper[4869]: I1001 15:55:23.991463 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d413715e-3ed1-4ea2-8b2f-0a56e1c2677e-scripts\") pod \"manila-share-share1-0\" (UID: \"d413715e-3ed1-4ea2-8b2f-0a56e1c2677e\") " pod="openstack/manila-share-share1-0" Oct 01 15:55:23 crc kubenswrapper[4869]: I1001 15:55:23.992770 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ad1db0ac-1359-46bf-b6d6-6ae089968105-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"ad1db0ac-1359-46bf-b6d6-6ae089968105\") " pod="openstack/manila-scheduler-0" Oct 01 15:55:23 crc kubenswrapper[4869]: I1001 15:55:23.995765 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/d413715e-3ed1-4ea2-8b2f-0a56e1c2677e-ceph\") pod \"manila-share-share1-0\" (UID: \"d413715e-3ed1-4ea2-8b2f-0a56e1c2677e\") " pod="openstack/manila-share-share1-0" Oct 01 15:55:23 crc kubenswrapper[4869]: I1001 15:55:23.995808 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kk9hz\" (UniqueName: \"kubernetes.io/projected/ad1db0ac-1359-46bf-b6d6-6ae089968105-kube-api-access-kk9hz\") pod \"manila-scheduler-0\" (UID: \"ad1db0ac-1359-46bf-b6d6-6ae089968105\") " pod="openstack/manila-scheduler-0" Oct 01 15:55:23 crc kubenswrapper[4869]: I1001 15:55:23.995890 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad1db0ac-1359-46bf-b6d6-6ae089968105-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"ad1db0ac-1359-46bf-b6d6-6ae089968105\") " pod="openstack/manila-scheduler-0" Oct 01 15:55:24 crc kubenswrapper[4869]: I1001 15:55:24.008108 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j64zd\" (UniqueName: \"kubernetes.io/projected/d413715e-3ed1-4ea2-8b2f-0a56e1c2677e-kube-api-access-j64zd\") pod \"manila-share-share1-0\" (UID: \"d413715e-3ed1-4ea2-8b2f-0a56e1c2677e\") " pod="openstack/manila-share-share1-0" Oct 01 15:55:24 crc kubenswrapper[4869]: I1001 15:55:24.052348 4869 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e4066add-a6a5-4393-a327-5c22cc8bd69e-ovsdbserver-sb\") pod \"dnsmasq-dns-5b88556f9c-4f47f\" (UID: \"e4066add-a6a5-4393-a327-5c22cc8bd69e\") " pod="openstack/dnsmasq-dns-5b88556f9c-4f47f" Oct 01 15:55:24 crc kubenswrapper[4869]: I1001 15:55:24.052401 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e4066add-a6a5-4393-a327-5c22cc8bd69e-config\") pod \"dnsmasq-dns-5b88556f9c-4f47f\" (UID: \"e4066add-a6a5-4393-a327-5c22cc8bd69e\") " pod="openstack/dnsmasq-dns-5b88556f9c-4f47f" Oct 01 15:55:24 crc kubenswrapper[4869]: I1001 15:55:24.052451 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2xqbc\" (UniqueName: \"kubernetes.io/projected/e4066add-a6a5-4393-a327-5c22cc8bd69e-kube-api-access-2xqbc\") pod \"dnsmasq-dns-5b88556f9c-4f47f\" (UID: \"e4066add-a6a5-4393-a327-5c22cc8bd69e\") " pod="openstack/dnsmasq-dns-5b88556f9c-4f47f" Oct 01 15:55:24 crc kubenswrapper[4869]: I1001 15:55:24.052535 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/e4066add-a6a5-4393-a327-5c22cc8bd69e-openstack-edpm-ipam\") pod \"dnsmasq-dns-5b88556f9c-4f47f\" (UID: \"e4066add-a6a5-4393-a327-5c22cc8bd69e\") " pod="openstack/dnsmasq-dns-5b88556f9c-4f47f" Oct 01 15:55:24 crc kubenswrapper[4869]: I1001 15:55:24.052554 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e4066add-a6a5-4393-a327-5c22cc8bd69e-ovsdbserver-nb\") pod \"dnsmasq-dns-5b88556f9c-4f47f\" (UID: \"e4066add-a6a5-4393-a327-5c22cc8bd69e\") " pod="openstack/dnsmasq-dns-5b88556f9c-4f47f" Oct 01 15:55:24 crc kubenswrapper[4869]: I1001 15:55:24.052577 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e4066add-a6a5-4393-a327-5c22cc8bd69e-dns-svc\") pod \"dnsmasq-dns-5b88556f9c-4f47f\" (UID: \"e4066add-a6a5-4393-a327-5c22cc8bd69e\") " pod="openstack/dnsmasq-dns-5b88556f9c-4f47f" Oct 01 15:55:24 crc kubenswrapper[4869]: I1001 15:55:24.070748 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-share-share1-0" Oct 01 15:55:24 crc kubenswrapper[4869]: I1001 15:55:24.089489 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-scheduler-0" Oct 01 15:55:24 crc kubenswrapper[4869]: I1001 15:55:24.108387 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-api-0"] Oct 01 15:55:24 crc kubenswrapper[4869]: I1001 15:55:24.116989 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-api-0" Oct 01 15:55:24 crc kubenswrapper[4869]: I1001 15:55:24.121918 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-api-0"] Oct 01 15:55:24 crc kubenswrapper[4869]: I1001 15:55:24.122838 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-api-config-data" Oct 01 15:55:24 crc kubenswrapper[4869]: I1001 15:55:24.154060 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2xqbc\" (UniqueName: \"kubernetes.io/projected/e4066add-a6a5-4393-a327-5c22cc8bd69e-kube-api-access-2xqbc\") pod \"dnsmasq-dns-5b88556f9c-4f47f\" (UID: \"e4066add-a6a5-4393-a327-5c22cc8bd69e\") " pod="openstack/dnsmasq-dns-5b88556f9c-4f47f" Oct 01 15:55:24 crc kubenswrapper[4869]: I1001 15:55:24.154201 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/e4066add-a6a5-4393-a327-5c22cc8bd69e-openstack-edpm-ipam\") pod \"dnsmasq-dns-5b88556f9c-4f47f\" (UID: \"e4066add-a6a5-4393-a327-5c22cc8bd69e\") " pod="openstack/dnsmasq-dns-5b88556f9c-4f47f" Oct 01 15:55:24 crc kubenswrapper[4869]: I1001 15:55:24.154232 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e4066add-a6a5-4393-a327-5c22cc8bd69e-ovsdbserver-nb\") pod \"dnsmasq-dns-5b88556f9c-4f47f\" (UID: \"e4066add-a6a5-4393-a327-5c22cc8bd69e\") " pod="openstack/dnsmasq-dns-5b88556f9c-4f47f" Oct 01 15:55:24 crc kubenswrapper[4869]: I1001 15:55:24.154286 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e4066add-a6a5-4393-a327-5c22cc8bd69e-dns-svc\") pod \"dnsmasq-dns-5b88556f9c-4f47f\" (UID: \"e4066add-a6a5-4393-a327-5c22cc8bd69e\") " pod="openstack/dnsmasq-dns-5b88556f9c-4f47f" Oct 01 15:55:24 crc kubenswrapper[4869]: I1001 15:55:24.154353 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e4066add-a6a5-4393-a327-5c22cc8bd69e-ovsdbserver-sb\") pod \"dnsmasq-dns-5b88556f9c-4f47f\" (UID: \"e4066add-a6a5-4393-a327-5c22cc8bd69e\") " pod="openstack/dnsmasq-dns-5b88556f9c-4f47f" Oct 01 15:55:24 crc kubenswrapper[4869]: I1001 15:55:24.154388 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e4066add-a6a5-4393-a327-5c22cc8bd69e-config\") pod \"dnsmasq-dns-5b88556f9c-4f47f\" (UID: \"e4066add-a6a5-4393-a327-5c22cc8bd69e\") " pod="openstack/dnsmasq-dns-5b88556f9c-4f47f" Oct 01 15:55:24 crc kubenswrapper[4869]: I1001 15:55:24.155866 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e4066add-a6a5-4393-a327-5c22cc8bd69e-config\") pod \"dnsmasq-dns-5b88556f9c-4f47f\" (UID: \"e4066add-a6a5-4393-a327-5c22cc8bd69e\") " pod="openstack/dnsmasq-dns-5b88556f9c-4f47f" Oct 01 15:55:24 crc kubenswrapper[4869]: I1001 15:55:24.155866 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/e4066add-a6a5-4393-a327-5c22cc8bd69e-openstack-edpm-ipam\") pod \"dnsmasq-dns-5b88556f9c-4f47f\" (UID: \"e4066add-a6a5-4393-a327-5c22cc8bd69e\") " pod="openstack/dnsmasq-dns-5b88556f9c-4f47f" Oct 01 15:55:24 crc kubenswrapper[4869]: I1001 15:55:24.156435 4869 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e4066add-a6a5-4393-a327-5c22cc8bd69e-ovsdbserver-sb\") pod \"dnsmasq-dns-5b88556f9c-4f47f\" (UID: \"e4066add-a6a5-4393-a327-5c22cc8bd69e\") " pod="openstack/dnsmasq-dns-5b88556f9c-4f47f" Oct 01 15:55:24 crc kubenswrapper[4869]: I1001 15:55:24.156610 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e4066add-a6a5-4393-a327-5c22cc8bd69e-ovsdbserver-nb\") pod \"dnsmasq-dns-5b88556f9c-4f47f\" (UID: \"e4066add-a6a5-4393-a327-5c22cc8bd69e\") " pod="openstack/dnsmasq-dns-5b88556f9c-4f47f" Oct 01 15:55:24 crc kubenswrapper[4869]: I1001 15:55:24.158421 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e4066add-a6a5-4393-a327-5c22cc8bd69e-dns-svc\") pod \"dnsmasq-dns-5b88556f9c-4f47f\" (UID: \"e4066add-a6a5-4393-a327-5c22cc8bd69e\") " pod="openstack/dnsmasq-dns-5b88556f9c-4f47f" Oct 01 15:55:24 crc kubenswrapper[4869]: I1001 15:55:24.173038 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2xqbc\" (UniqueName: \"kubernetes.io/projected/e4066add-a6a5-4393-a327-5c22cc8bd69e-kube-api-access-2xqbc\") pod \"dnsmasq-dns-5b88556f9c-4f47f\" (UID: \"e4066add-a6a5-4393-a327-5c22cc8bd69e\") " pod="openstack/dnsmasq-dns-5b88556f9c-4f47f" Oct 01 15:55:24 crc kubenswrapper[4869]: I1001 15:55:24.211154 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5b88556f9c-4f47f" Oct 01 15:55:24 crc kubenswrapper[4869]: I1001 15:55:24.256488 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/618b4066-6eed-43e5-97c9-b8551da111df-scripts\") pod \"manila-api-0\" (UID: \"618b4066-6eed-43e5-97c9-b8551da111df\") " pod="openstack/manila-api-0" Oct 01 15:55:24 crc kubenswrapper[4869]: I1001 15:55:24.256562 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ncwd4\" (UniqueName: \"kubernetes.io/projected/618b4066-6eed-43e5-97c9-b8551da111df-kube-api-access-ncwd4\") pod \"manila-api-0\" (UID: \"618b4066-6eed-43e5-97c9-b8551da111df\") " pod="openstack/manila-api-0" Oct 01 15:55:24 crc kubenswrapper[4869]: I1001 15:55:24.256668 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/618b4066-6eed-43e5-97c9-b8551da111df-config-data-custom\") pod \"manila-api-0\" (UID: \"618b4066-6eed-43e5-97c9-b8551da111df\") " pod="openstack/manila-api-0" Oct 01 15:55:24 crc kubenswrapper[4869]: I1001 15:55:24.256706 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/618b4066-6eed-43e5-97c9-b8551da111df-etc-machine-id\") pod \"manila-api-0\" (UID: \"618b4066-6eed-43e5-97c9-b8551da111df\") " pod="openstack/manila-api-0" Oct 01 15:55:24 crc kubenswrapper[4869]: I1001 15:55:24.256742 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/618b4066-6eed-43e5-97c9-b8551da111df-config-data\") pod \"manila-api-0\" (UID: \"618b4066-6eed-43e5-97c9-b8551da111df\") " pod="openstack/manila-api-0" Oct 01 15:55:24 crc kubenswrapper[4869]: I1001 15:55:24.256768 4869 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/618b4066-6eed-43e5-97c9-b8551da111df-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"618b4066-6eed-43e5-97c9-b8551da111df\") " pod="openstack/manila-api-0" Oct 01 15:55:24 crc kubenswrapper[4869]: I1001 15:55:24.256836 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/618b4066-6eed-43e5-97c9-b8551da111df-logs\") pod \"manila-api-0\" (UID: \"618b4066-6eed-43e5-97c9-b8551da111df\") " pod="openstack/manila-api-0" Oct 01 15:55:24 crc kubenswrapper[4869]: I1001 15:55:24.359505 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/618b4066-6eed-43e5-97c9-b8551da111df-logs\") pod \"manila-api-0\" (UID: \"618b4066-6eed-43e5-97c9-b8551da111df\") " pod="openstack/manila-api-0" Oct 01 15:55:24 crc kubenswrapper[4869]: I1001 15:55:24.359946 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/618b4066-6eed-43e5-97c9-b8551da111df-logs\") pod \"manila-api-0\" (UID: \"618b4066-6eed-43e5-97c9-b8551da111df\") " pod="openstack/manila-api-0" Oct 01 15:55:24 crc kubenswrapper[4869]: I1001 15:55:24.360036 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/618b4066-6eed-43e5-97c9-b8551da111df-scripts\") pod \"manila-api-0\" (UID: \"618b4066-6eed-43e5-97c9-b8551da111df\") " pod="openstack/manila-api-0" Oct 01 15:55:24 crc kubenswrapper[4869]: I1001 15:55:24.360522 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ncwd4\" (UniqueName: \"kubernetes.io/projected/618b4066-6eed-43e5-97c9-b8551da111df-kube-api-access-ncwd4\") pod \"manila-api-0\" (UID: \"618b4066-6eed-43e5-97c9-b8551da111df\") " pod="openstack/manila-api-0" Oct 01 15:55:24 crc kubenswrapper[4869]: I1001 15:55:24.360633 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/618b4066-6eed-43e5-97c9-b8551da111df-config-data-custom\") pod \"manila-api-0\" (UID: \"618b4066-6eed-43e5-97c9-b8551da111df\") " pod="openstack/manila-api-0" Oct 01 15:55:24 crc kubenswrapper[4869]: I1001 15:55:24.360711 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/618b4066-6eed-43e5-97c9-b8551da111df-etc-machine-id\") pod \"manila-api-0\" (UID: \"618b4066-6eed-43e5-97c9-b8551da111df\") " pod="openstack/manila-api-0" Oct 01 15:55:24 crc kubenswrapper[4869]: I1001 15:55:24.360738 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/618b4066-6eed-43e5-97c9-b8551da111df-config-data\") pod \"manila-api-0\" (UID: \"618b4066-6eed-43e5-97c9-b8551da111df\") " pod="openstack/manila-api-0" Oct 01 15:55:24 crc kubenswrapper[4869]: I1001 15:55:24.360767 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/618b4066-6eed-43e5-97c9-b8551da111df-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"618b4066-6eed-43e5-97c9-b8551da111df\") " pod="openstack/manila-api-0" Oct 01 15:55:24 crc kubenswrapper[4869]: I1001 15:55:24.360993 4869 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/618b4066-6eed-43e5-97c9-b8551da111df-etc-machine-id\") pod \"manila-api-0\" (UID: \"618b4066-6eed-43e5-97c9-b8551da111df\") " pod="openstack/manila-api-0" Oct 01 15:55:24 crc kubenswrapper[4869]: I1001 15:55:24.366638 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/618b4066-6eed-43e5-97c9-b8551da111df-config-data-custom\") pod \"manila-api-0\" (UID: \"618b4066-6eed-43e5-97c9-b8551da111df\") " pod="openstack/manila-api-0" Oct 01 15:55:24 crc kubenswrapper[4869]: I1001 15:55:24.366662 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/618b4066-6eed-43e5-97c9-b8551da111df-scripts\") pod \"manila-api-0\" (UID: \"618b4066-6eed-43e5-97c9-b8551da111df\") " pod="openstack/manila-api-0" Oct 01 15:55:24 crc kubenswrapper[4869]: I1001 15:55:24.366677 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/618b4066-6eed-43e5-97c9-b8551da111df-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"618b4066-6eed-43e5-97c9-b8551da111df\") " pod="openstack/manila-api-0" Oct 01 15:55:24 crc kubenswrapper[4869]: I1001 15:55:24.367367 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/618b4066-6eed-43e5-97c9-b8551da111df-config-data\") pod \"manila-api-0\" (UID: \"618b4066-6eed-43e5-97c9-b8551da111df\") " pod="openstack/manila-api-0" Oct 01 15:55:24 crc kubenswrapper[4869]: I1001 15:55:24.375339 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ncwd4\" (UniqueName: \"kubernetes.io/projected/618b4066-6eed-43e5-97c9-b8551da111df-kube-api-access-ncwd4\") pod \"manila-api-0\" (UID: \"618b4066-6eed-43e5-97c9-b8551da111df\") " pod="openstack/manila-api-0" Oct 01 15:55:24 crc kubenswrapper[4869]: I1001 15:55:24.454894 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-api-0" Oct 01 15:55:24 crc kubenswrapper[4869]: I1001 15:55:24.640026 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-scheduler-0"] Oct 01 15:55:24 crc kubenswrapper[4869]: I1001 15:55:24.762236 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-share-share1-0"] Oct 01 15:55:24 crc kubenswrapper[4869]: I1001 15:55:24.798143 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5b88556f9c-4f47f"] Oct 01 15:55:25 crc kubenswrapper[4869]: I1001 15:55:25.100583 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-api-0"] Oct 01 15:55:25 crc kubenswrapper[4869]: I1001 15:55:25.365969 4869 generic.go:334] "Generic (PLEG): container finished" podID="e4066add-a6a5-4393-a327-5c22cc8bd69e" containerID="092b1751d1abe1b7722523b442e14d54d0ccb7143a84c1ef1fc02e904b9f9355" exitCode=0 Oct 01 15:55:25 crc kubenswrapper[4869]: I1001 15:55:25.366059 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b88556f9c-4f47f" event={"ID":"e4066add-a6a5-4393-a327-5c22cc8bd69e","Type":"ContainerDied","Data":"092b1751d1abe1b7722523b442e14d54d0ccb7143a84c1ef1fc02e904b9f9355"} Oct 01 15:55:25 crc kubenswrapper[4869]: I1001 15:55:25.366085 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b88556f9c-4f47f" event={"ID":"e4066add-a6a5-4393-a327-5c22cc8bd69e","Type":"ContainerStarted","Data":"5e8b0c1f4b77d80c758fb6d0d7a2ca141e9890ddaa2fd60bae57976331932242"} Oct 01 15:55:25 crc kubenswrapper[4869]: I1001 15:55:25.370731 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"618b4066-6eed-43e5-97c9-b8551da111df","Type":"ContainerStarted","Data":"68ce5c53ae21c993719b49eb613f27af25ebfe98eb8d5ab87f2b604ece94d2e5"} Oct 01 15:55:25 crc kubenswrapper[4869]: I1001 15:55:25.381665 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"ad1db0ac-1359-46bf-b6d6-6ae089968105","Type":"ContainerStarted","Data":"f3dad3a91977a40d1c6447eaa9c769e9cce3747dd6079e062fbbd0c6e8d50521"} Oct 01 15:55:25 crc kubenswrapper[4869]: I1001 15:55:25.387735 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"d413715e-3ed1-4ea2-8b2f-0a56e1c2677e","Type":"ContainerStarted","Data":"77f9257542da8ef5a245ce388d7052a4067e152c3ec30184f9195fe99665e1fe"} Oct 01 15:55:26 crc kubenswrapper[4869]: I1001 15:55:26.399636 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b88556f9c-4f47f" event={"ID":"e4066add-a6a5-4393-a327-5c22cc8bd69e","Type":"ContainerStarted","Data":"659d125fe6dafdf1c6b0bf2761727456097537071ac337f875061b44e7a149a2"} Oct 01 15:55:26 crc kubenswrapper[4869]: I1001 15:55:26.400106 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5b88556f9c-4f47f" Oct 01 15:55:26 crc kubenswrapper[4869]: I1001 15:55:26.404898 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"618b4066-6eed-43e5-97c9-b8551da111df","Type":"ContainerStarted","Data":"bffe0d916ba9a20ebfecfc7047d634b54b53ff2fcf95271bbee1f15a168b5b0b"} Oct 01 15:55:26 crc kubenswrapper[4869]: I1001 15:55:26.404943 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" 
event={"ID":"618b4066-6eed-43e5-97c9-b8551da111df","Type":"ContainerStarted","Data":"7b5914c9ff44ea90ca6e9fd54804f35267c6f8c46b223a7f461887c16ed1972f"} Oct 01 15:55:26 crc kubenswrapper[4869]: I1001 15:55:26.406387 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/manila-api-0" Oct 01 15:55:26 crc kubenswrapper[4869]: I1001 15:55:26.408142 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"ad1db0ac-1359-46bf-b6d6-6ae089968105","Type":"ContainerStarted","Data":"f18a2f9d660ec959412c7c816017b9531ca71df61884dd1ff143b7052b966d11"} Oct 01 15:55:26 crc kubenswrapper[4869]: I1001 15:55:26.428231 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5b88556f9c-4f47f" podStartSLOduration=3.42820993 podStartE2EDuration="3.42820993s" podCreationTimestamp="2025-10-01 15:55:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:55:26.415449738 +0000 UTC m=+3035.562292864" watchObservedRunningTime="2025-10-01 15:55:26.42820993 +0000 UTC m=+3035.575053046" Oct 01 15:55:26 crc kubenswrapper[4869]: I1001 15:55:26.445831 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-api-0" podStartSLOduration=2.4458109439999998 podStartE2EDuration="2.445810944s" podCreationTimestamp="2025-10-01 15:55:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:55:26.43375061 +0000 UTC m=+3035.580593726" watchObservedRunningTime="2025-10-01 15:55:26.445810944 +0000 UTC m=+3035.592654060" Oct 01 15:55:26 crc kubenswrapper[4869]: I1001 15:55:26.813639 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-api-0"] Oct 01 15:55:27 crc kubenswrapper[4869]: I1001 15:55:27.419239 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"ad1db0ac-1359-46bf-b6d6-6ae089968105","Type":"ContainerStarted","Data":"39986cca38322ac80b9db03b4ed3e3038dcf7b530a41087e87f2870438450193"} Oct 01 15:55:27 crc kubenswrapper[4869]: I1001 15:55:27.438422 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-scheduler-0" podStartSLOduration=3.552506387 podStartE2EDuration="4.438385195s" podCreationTimestamp="2025-10-01 15:55:23 +0000 UTC" firstStartedPulling="2025-10-01 15:55:24.646279507 +0000 UTC m=+3033.793122623" lastFinishedPulling="2025-10-01 15:55:25.532158315 +0000 UTC m=+3034.679001431" observedRunningTime="2025-10-01 15:55:27.434922857 +0000 UTC m=+3036.581765973" watchObservedRunningTime="2025-10-01 15:55:27.438385195 +0000 UTC m=+3036.585228311" Oct 01 15:55:28 crc kubenswrapper[4869]: I1001 15:55:28.429548 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/manila-api-0" podUID="618b4066-6eed-43e5-97c9-b8551da111df" containerName="manila-api-log" containerID="cri-o://7b5914c9ff44ea90ca6e9fd54804f35267c6f8c46b223a7f461887c16ed1972f" gracePeriod=30 Oct 01 15:55:28 crc kubenswrapper[4869]: I1001 15:55:28.429971 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/manila-api-0" podUID="618b4066-6eed-43e5-97c9-b8551da111df" containerName="manila-api" containerID="cri-o://bffe0d916ba9a20ebfecfc7047d634b54b53ff2fcf95271bbee1f15a168b5b0b" gracePeriod=30 Oct 01 15:55:28 crc kubenswrapper[4869]: I1001 
15:55:28.712091 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 01 15:55:28 crc kubenswrapper[4869]: I1001 15:55:28.712646 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a85f4a7e-3c28-4c54-be1f-723b7bca17cb" containerName="ceilometer-central-agent" containerID="cri-o://5d7f6bb569c2d509ac156b44fe77860c43340f6cebae00646167e172b3843863" gracePeriod=30 Oct 01 15:55:28 crc kubenswrapper[4869]: I1001 15:55:28.712777 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a85f4a7e-3c28-4c54-be1f-723b7bca17cb" containerName="proxy-httpd" containerID="cri-o://50af1c3ce7191283bfc6bc3fe08fa17509298223aa1e97b01c5d0f266373f2b5" gracePeriod=30 Oct 01 15:55:28 crc kubenswrapper[4869]: I1001 15:55:28.712812 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a85f4a7e-3c28-4c54-be1f-723b7bca17cb" containerName="sg-core" containerID="cri-o://27c896f79ef9992f021af92cb85e0b552a056402feee8e6d6d999b744c9f0674" gracePeriod=30 Oct 01 15:55:28 crc kubenswrapper[4869]: I1001 15:55:28.712844 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a85f4a7e-3c28-4c54-be1f-723b7bca17cb" containerName="ceilometer-notification-agent" containerID="cri-o://7e608337fd0018c6ebc75695e00d468c4bd848f5712c6466fcb75bbe530c7950" gracePeriod=30 Oct 01 15:55:28 crc kubenswrapper[4869]: E1001 15:55:28.713470 4869 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod618b4066_6eed_43e5_97c9_b8551da111df.slice/crio-bffe0d916ba9a20ebfecfc7047d634b54b53ff2fcf95271bbee1f15a168b5b0b.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod618b4066_6eed_43e5_97c9_b8551da111df.slice/crio-conmon-bffe0d916ba9a20ebfecfc7047d634b54b53ff2fcf95271bbee1f15a168b5b0b.scope\": RecentStats: unable to find data in memory cache]" Oct 01 15:55:29 crc kubenswrapper[4869]: I1001 15:55:29.441125 4869 generic.go:334] "Generic (PLEG): container finished" podID="a85f4a7e-3c28-4c54-be1f-723b7bca17cb" containerID="50af1c3ce7191283bfc6bc3fe08fa17509298223aa1e97b01c5d0f266373f2b5" exitCode=0 Oct 01 15:55:29 crc kubenswrapper[4869]: I1001 15:55:29.441459 4869 generic.go:334] "Generic (PLEG): container finished" podID="a85f4a7e-3c28-4c54-be1f-723b7bca17cb" containerID="27c896f79ef9992f021af92cb85e0b552a056402feee8e6d6d999b744c9f0674" exitCode=2 Oct 01 15:55:29 crc kubenswrapper[4869]: I1001 15:55:29.441471 4869 generic.go:334] "Generic (PLEG): container finished" podID="a85f4a7e-3c28-4c54-be1f-723b7bca17cb" containerID="5d7f6bb569c2d509ac156b44fe77860c43340f6cebae00646167e172b3843863" exitCode=0 Oct 01 15:55:29 crc kubenswrapper[4869]: I1001 15:55:29.441337 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a85f4a7e-3c28-4c54-be1f-723b7bca17cb","Type":"ContainerDied","Data":"50af1c3ce7191283bfc6bc3fe08fa17509298223aa1e97b01c5d0f266373f2b5"} Oct 01 15:55:29 crc kubenswrapper[4869]: I1001 15:55:29.441537 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a85f4a7e-3c28-4c54-be1f-723b7bca17cb","Type":"ContainerDied","Data":"27c896f79ef9992f021af92cb85e0b552a056402feee8e6d6d999b744c9f0674"} Oct 01 15:55:29 crc 
kubenswrapper[4869]: I1001 15:55:29.441550 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a85f4a7e-3c28-4c54-be1f-723b7bca17cb","Type":"ContainerDied","Data":"5d7f6bb569c2d509ac156b44fe77860c43340f6cebae00646167e172b3843863"} Oct 01 15:55:29 crc kubenswrapper[4869]: I1001 15:55:29.444323 4869 generic.go:334] "Generic (PLEG): container finished" podID="618b4066-6eed-43e5-97c9-b8551da111df" containerID="bffe0d916ba9a20ebfecfc7047d634b54b53ff2fcf95271bbee1f15a168b5b0b" exitCode=0 Oct 01 15:55:29 crc kubenswrapper[4869]: I1001 15:55:29.444351 4869 generic.go:334] "Generic (PLEG): container finished" podID="618b4066-6eed-43e5-97c9-b8551da111df" containerID="7b5914c9ff44ea90ca6e9fd54804f35267c6f8c46b223a7f461887c16ed1972f" exitCode=143 Oct 01 15:55:29 crc kubenswrapper[4869]: I1001 15:55:29.444371 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"618b4066-6eed-43e5-97c9-b8551da111df","Type":"ContainerDied","Data":"bffe0d916ba9a20ebfecfc7047d634b54b53ff2fcf95271bbee1f15a168b5b0b"} Oct 01 15:55:29 crc kubenswrapper[4869]: I1001 15:55:29.444398 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"618b4066-6eed-43e5-97c9-b8551da111df","Type":"ContainerDied","Data":"7b5914c9ff44ea90ca6e9fd54804f35267c6f8c46b223a7f461887c16ed1972f"} Oct 01 15:55:30 crc kubenswrapper[4869]: I1001 15:55:30.457326 4869 generic.go:334] "Generic (PLEG): container finished" podID="a85f4a7e-3c28-4c54-be1f-723b7bca17cb" containerID="7e608337fd0018c6ebc75695e00d468c4bd848f5712c6466fcb75bbe530c7950" exitCode=0 Oct 01 15:55:30 crc kubenswrapper[4869]: I1001 15:55:30.457400 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a85f4a7e-3c28-4c54-be1f-723b7bca17cb","Type":"ContainerDied","Data":"7e608337fd0018c6ebc75695e00d468c4bd848f5712c6466fcb75bbe530c7950"} Oct 01 15:55:30 crc kubenswrapper[4869]: I1001 15:55:30.800546 4869 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="a85f4a7e-3c28-4c54-be1f-723b7bca17cb" containerName="proxy-httpd" probeResult="failure" output="Get \"https://10.217.0.188:3000/\": dial tcp 10.217.0.188:3000: connect: connection refused" Oct 01 15:55:31 crc kubenswrapper[4869]: I1001 15:55:31.785083 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-api-0" Oct 01 15:55:31 crc kubenswrapper[4869]: I1001 15:55:31.916142 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ncwd4\" (UniqueName: \"kubernetes.io/projected/618b4066-6eed-43e5-97c9-b8551da111df-kube-api-access-ncwd4\") pod \"618b4066-6eed-43e5-97c9-b8551da111df\" (UID: \"618b4066-6eed-43e5-97c9-b8551da111df\") " Oct 01 15:55:31 crc kubenswrapper[4869]: I1001 15:55:31.916187 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/618b4066-6eed-43e5-97c9-b8551da111df-config-data\") pod \"618b4066-6eed-43e5-97c9-b8551da111df\" (UID: \"618b4066-6eed-43e5-97c9-b8551da111df\") " Oct 01 15:55:31 crc kubenswrapper[4869]: I1001 15:55:31.916206 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/618b4066-6eed-43e5-97c9-b8551da111df-combined-ca-bundle\") pod \"618b4066-6eed-43e5-97c9-b8551da111df\" (UID: \"618b4066-6eed-43e5-97c9-b8551da111df\") " Oct 01 15:55:31 crc kubenswrapper[4869]: I1001 15:55:31.916247 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/618b4066-6eed-43e5-97c9-b8551da111df-config-data-custom\") pod \"618b4066-6eed-43e5-97c9-b8551da111df\" (UID: \"618b4066-6eed-43e5-97c9-b8551da111df\") " Oct 01 15:55:31 crc kubenswrapper[4869]: I1001 15:55:31.916498 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/618b4066-6eed-43e5-97c9-b8551da111df-scripts\") pod \"618b4066-6eed-43e5-97c9-b8551da111df\" (UID: \"618b4066-6eed-43e5-97c9-b8551da111df\") " Oct 01 15:55:31 crc kubenswrapper[4869]: I1001 15:55:31.916528 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/618b4066-6eed-43e5-97c9-b8551da111df-etc-machine-id\") pod \"618b4066-6eed-43e5-97c9-b8551da111df\" (UID: \"618b4066-6eed-43e5-97c9-b8551da111df\") " Oct 01 15:55:31 crc kubenswrapper[4869]: I1001 15:55:31.916557 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/618b4066-6eed-43e5-97c9-b8551da111df-logs\") pod \"618b4066-6eed-43e5-97c9-b8551da111df\" (UID: \"618b4066-6eed-43e5-97c9-b8551da111df\") " Oct 01 15:55:31 crc kubenswrapper[4869]: I1001 15:55:31.916991 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/618b4066-6eed-43e5-97c9-b8551da111df-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "618b4066-6eed-43e5-97c9-b8551da111df" (UID: "618b4066-6eed-43e5-97c9-b8551da111df"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 15:55:31 crc kubenswrapper[4869]: I1001 15:55:31.919547 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/618b4066-6eed-43e5-97c9-b8551da111df-logs" (OuterVolumeSpecName: "logs") pod "618b4066-6eed-43e5-97c9-b8551da111df" (UID: "618b4066-6eed-43e5-97c9-b8551da111df"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 15:55:31 crc kubenswrapper[4869]: I1001 15:55:31.923442 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/618b4066-6eed-43e5-97c9-b8551da111df-scripts" (OuterVolumeSpecName: "scripts") pod "618b4066-6eed-43e5-97c9-b8551da111df" (UID: "618b4066-6eed-43e5-97c9-b8551da111df"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:55:31 crc kubenswrapper[4869]: I1001 15:55:31.925305 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/618b4066-6eed-43e5-97c9-b8551da111df-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "618b4066-6eed-43e5-97c9-b8551da111df" (UID: "618b4066-6eed-43e5-97c9-b8551da111df"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:55:31 crc kubenswrapper[4869]: I1001 15:55:31.929191 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/618b4066-6eed-43e5-97c9-b8551da111df-kube-api-access-ncwd4" (OuterVolumeSpecName: "kube-api-access-ncwd4") pod "618b4066-6eed-43e5-97c9-b8551da111df" (UID: "618b4066-6eed-43e5-97c9-b8551da111df"). InnerVolumeSpecName "kube-api-access-ncwd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:55:31 crc kubenswrapper[4869]: I1001 15:55:31.991295 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/618b4066-6eed-43e5-97c9-b8551da111df-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "618b4066-6eed-43e5-97c9-b8551da111df" (UID: "618b4066-6eed-43e5-97c9-b8551da111df"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:55:31 crc kubenswrapper[4869]: I1001 15:55:31.997129 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/618b4066-6eed-43e5-97c9-b8551da111df-config-data" (OuterVolumeSpecName: "config-data") pod "618b4066-6eed-43e5-97c9-b8551da111df" (UID: "618b4066-6eed-43e5-97c9-b8551da111df"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.026271 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ncwd4\" (UniqueName: \"kubernetes.io/projected/618b4066-6eed-43e5-97c9-b8551da111df-kube-api-access-ncwd4\") on node \"crc\" DevicePath \"\"" Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.026310 4869 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/618b4066-6eed-43e5-97c9-b8551da111df-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.026324 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/618b4066-6eed-43e5-97c9-b8551da111df-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.026335 4869 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/618b4066-6eed-43e5-97c9-b8551da111df-config-data-custom\") on node \"crc\" DevicePath \"\"" Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.026349 4869 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/618b4066-6eed-43e5-97c9-b8551da111df-scripts\") on node \"crc\" DevicePath \"\"" Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.026360 4869 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/618b4066-6eed-43e5-97c9-b8551da111df-etc-machine-id\") on node \"crc\" DevicePath \"\"" Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.026370 4869 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/618b4066-6eed-43e5-97c9-b8551da111df-logs\") on node \"crc\" DevicePath \"\"" Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.088755 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.230033 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vzlsc\" (UniqueName: \"kubernetes.io/projected/a85f4a7e-3c28-4c54-be1f-723b7bca17cb-kube-api-access-vzlsc\") pod \"a85f4a7e-3c28-4c54-be1f-723b7bca17cb\" (UID: \"a85f4a7e-3c28-4c54-be1f-723b7bca17cb\") " Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.230159 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a85f4a7e-3c28-4c54-be1f-723b7bca17cb-scripts\") pod \"a85f4a7e-3c28-4c54-be1f-723b7bca17cb\" (UID: \"a85f4a7e-3c28-4c54-be1f-723b7bca17cb\") " Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.230205 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a85f4a7e-3c28-4c54-be1f-723b7bca17cb-run-httpd\") pod \"a85f4a7e-3c28-4c54-be1f-723b7bca17cb\" (UID: \"a85f4a7e-3c28-4c54-be1f-723b7bca17cb\") " Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.230246 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/a85f4a7e-3c28-4c54-be1f-723b7bca17cb-ceilometer-tls-certs\") pod \"a85f4a7e-3c28-4c54-be1f-723b7bca17cb\" (UID: \"a85f4a7e-3c28-4c54-be1f-723b7bca17cb\") " Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.230295 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a85f4a7e-3c28-4c54-be1f-723b7bca17cb-config-data\") pod \"a85f4a7e-3c28-4c54-be1f-723b7bca17cb\" (UID: \"a85f4a7e-3c28-4c54-be1f-723b7bca17cb\") " Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.230369 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a85f4a7e-3c28-4c54-be1f-723b7bca17cb-combined-ca-bundle\") pod \"a85f4a7e-3c28-4c54-be1f-723b7bca17cb\" (UID: \"a85f4a7e-3c28-4c54-be1f-723b7bca17cb\") " Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.230461 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a85f4a7e-3c28-4c54-be1f-723b7bca17cb-log-httpd\") pod \"a85f4a7e-3c28-4c54-be1f-723b7bca17cb\" (UID: \"a85f4a7e-3c28-4c54-be1f-723b7bca17cb\") " Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.230486 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a85f4a7e-3c28-4c54-be1f-723b7bca17cb-sg-core-conf-yaml\") pod \"a85f4a7e-3c28-4c54-be1f-723b7bca17cb\" (UID: \"a85f4a7e-3c28-4c54-be1f-723b7bca17cb\") " Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.230826 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a85f4a7e-3c28-4c54-be1f-723b7bca17cb-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "a85f4a7e-3c28-4c54-be1f-723b7bca17cb" (UID: "a85f4a7e-3c28-4c54-be1f-723b7bca17cb"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.231051 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a85f4a7e-3c28-4c54-be1f-723b7bca17cb-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "a85f4a7e-3c28-4c54-be1f-723b7bca17cb" (UID: "a85f4a7e-3c28-4c54-be1f-723b7bca17cb"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.236676 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a85f4a7e-3c28-4c54-be1f-723b7bca17cb-kube-api-access-vzlsc" (OuterVolumeSpecName: "kube-api-access-vzlsc") pod "a85f4a7e-3c28-4c54-be1f-723b7bca17cb" (UID: "a85f4a7e-3c28-4c54-be1f-723b7bca17cb"). InnerVolumeSpecName "kube-api-access-vzlsc". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.237439 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a85f4a7e-3c28-4c54-be1f-723b7bca17cb-scripts" (OuterVolumeSpecName: "scripts") pod "a85f4a7e-3c28-4c54-be1f-723b7bca17cb" (UID: "a85f4a7e-3c28-4c54-be1f-723b7bca17cb"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.269580 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a85f4a7e-3c28-4c54-be1f-723b7bca17cb-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "a85f4a7e-3c28-4c54-be1f-723b7bca17cb" (UID: "a85f4a7e-3c28-4c54-be1f-723b7bca17cb"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.286613 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a85f4a7e-3c28-4c54-be1f-723b7bca17cb-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "a85f4a7e-3c28-4c54-be1f-723b7bca17cb" (UID: "a85f4a7e-3c28-4c54-be1f-723b7bca17cb"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.316930 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a85f4a7e-3c28-4c54-be1f-723b7bca17cb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a85f4a7e-3c28-4c54-be1f-723b7bca17cb" (UID: "a85f4a7e-3c28-4c54-be1f-723b7bca17cb"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.330492 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a85f4a7e-3c28-4c54-be1f-723b7bca17cb-config-data" (OuterVolumeSpecName: "config-data") pod "a85f4a7e-3c28-4c54-be1f-723b7bca17cb" (UID: "a85f4a7e-3c28-4c54-be1f-723b7bca17cb"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.332931 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a85f4a7e-3c28-4c54-be1f-723b7bca17cb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.332962 4869 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a85f4a7e-3c28-4c54-be1f-723b7bca17cb-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.332972 4869 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a85f4a7e-3c28-4c54-be1f-723b7bca17cb-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.332982 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vzlsc\" (UniqueName: \"kubernetes.io/projected/a85f4a7e-3c28-4c54-be1f-723b7bca17cb-kube-api-access-vzlsc\") on node \"crc\" DevicePath \"\"" Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.332995 4869 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a85f4a7e-3c28-4c54-be1f-723b7bca17cb-scripts\") on node \"crc\" DevicePath \"\"" Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.333004 4869 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a85f4a7e-3c28-4c54-be1f-723b7bca17cb-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.333013 4869 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/a85f4a7e-3c28-4c54-be1f-723b7bca17cb-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.333022 4869 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a85f4a7e-3c28-4c54-be1f-723b7bca17cb-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.478174 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a85f4a7e-3c28-4c54-be1f-723b7bca17cb","Type":"ContainerDied","Data":"f804d56478987f7113e177c4d908481bc49e8ef7b05f0e3daf28c8ae6cf91665"} Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.478224 4869 scope.go:117] "RemoveContainer" containerID="50af1c3ce7191283bfc6bc3fe08fa17509298223aa1e97b01c5d0f266373f2b5" Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.478229 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.480797 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"618b4066-6eed-43e5-97c9-b8551da111df","Type":"ContainerDied","Data":"68ce5c53ae21c993719b49eb613f27af25ebfe98eb8d5ab87f2b604ece94d2e5"} Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.480846 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-api-0" Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.483088 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"d413715e-3ed1-4ea2-8b2f-0a56e1c2677e","Type":"ContainerStarted","Data":"f111438f80d0f5055529c6244880ac9ee8a04edffa8c2d0c1641a89a1b5e6d53"} Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.507559 4869 scope.go:117] "RemoveContainer" containerID="27c896f79ef9992f021af92cb85e0b552a056402feee8e6d6d999b744c9f0674" Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.520417 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.548400 4869 scope.go:117] "RemoveContainer" containerID="7e608337fd0018c6ebc75695e00d468c4bd848f5712c6466fcb75bbe530c7950" Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.553113 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.564201 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 01 15:55:32 crc kubenswrapper[4869]: E1001 15:55:32.564652 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a85f4a7e-3c28-4c54-be1f-723b7bca17cb" containerName="ceilometer-notification-agent" Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.564664 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="a85f4a7e-3c28-4c54-be1f-723b7bca17cb" containerName="ceilometer-notification-agent" Oct 01 15:55:32 crc kubenswrapper[4869]: E1001 15:55:32.564675 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="618b4066-6eed-43e5-97c9-b8551da111df" containerName="manila-api" Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.564681 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="618b4066-6eed-43e5-97c9-b8551da111df" containerName="manila-api" Oct 01 15:55:32 crc kubenswrapper[4869]: E1001 15:55:32.564696 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="618b4066-6eed-43e5-97c9-b8551da111df" containerName="manila-api-log" Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.564702 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="618b4066-6eed-43e5-97c9-b8551da111df" containerName="manila-api-log" Oct 01 15:55:32 crc kubenswrapper[4869]: E1001 15:55:32.564720 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a85f4a7e-3c28-4c54-be1f-723b7bca17cb" containerName="sg-core" Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.564727 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="a85f4a7e-3c28-4c54-be1f-723b7bca17cb" containerName="sg-core" Oct 01 15:55:32 crc kubenswrapper[4869]: E1001 15:55:32.564741 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a85f4a7e-3c28-4c54-be1f-723b7bca17cb" containerName="proxy-httpd" Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.564747 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="a85f4a7e-3c28-4c54-be1f-723b7bca17cb" containerName="proxy-httpd" Oct 01 15:55:32 crc kubenswrapper[4869]: E1001 15:55:32.564759 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a85f4a7e-3c28-4c54-be1f-723b7bca17cb" containerName="ceilometer-central-agent" Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.564766 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="a85f4a7e-3c28-4c54-be1f-723b7bca17cb" containerName="ceilometer-central-agent" 
Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.564919 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="a85f4a7e-3c28-4c54-be1f-723b7bca17cb" containerName="ceilometer-central-agent" Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.564938 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="a85f4a7e-3c28-4c54-be1f-723b7bca17cb" containerName="proxy-httpd" Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.564946 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="618b4066-6eed-43e5-97c9-b8551da111df" containerName="manila-api" Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.564954 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="618b4066-6eed-43e5-97c9-b8551da111df" containerName="manila-api-log" Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.564963 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="a85f4a7e-3c28-4c54-be1f-723b7bca17cb" containerName="ceilometer-notification-agent" Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.564972 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="a85f4a7e-3c28-4c54-be1f-723b7bca17cb" containerName="sg-core" Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.568307 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.582436 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.582709 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.582824 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.588898 4869 scope.go:117] "RemoveContainer" containerID="5d7f6bb569c2d509ac156b44fe77860c43340f6cebae00646167e172b3843863" Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.591214 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-api-0"] Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.631115 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/manila-api-0"] Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.642446 4869 scope.go:117] "RemoveContainer" containerID="bffe0d916ba9a20ebfecfc7047d634b54b53ff2fcf95271bbee1f15a168b5b0b" Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.643663 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/0a1f3c4c-d1e4-4602-9615-2587027c04d2-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"0a1f3c4c-d1e4-4602-9615-2587027c04d2\") " pod="openstack/ceilometer-0" Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.643711 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0a1f3c4c-d1e4-4602-9615-2587027c04d2-run-httpd\") pod \"ceilometer-0\" (UID: \"0a1f3c4c-d1e4-4602-9615-2587027c04d2\") " pod="openstack/ceilometer-0" Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.644137 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jht9n\" (UniqueName: 
\"kubernetes.io/projected/0a1f3c4c-d1e4-4602-9615-2587027c04d2-kube-api-access-jht9n\") pod \"ceilometer-0\" (UID: \"0a1f3c4c-d1e4-4602-9615-2587027c04d2\") " pod="openstack/ceilometer-0" Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.644182 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0a1f3c4c-d1e4-4602-9615-2587027c04d2-scripts\") pod \"ceilometer-0\" (UID: \"0a1f3c4c-d1e4-4602-9615-2587027c04d2\") " pod="openstack/ceilometer-0" Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.644827 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0a1f3c4c-d1e4-4602-9615-2587027c04d2-config-data\") pod \"ceilometer-0\" (UID: \"0a1f3c4c-d1e4-4602-9615-2587027c04d2\") " pod="openstack/ceilometer-0" Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.644872 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0a1f3c4c-d1e4-4602-9615-2587027c04d2-log-httpd\") pod \"ceilometer-0\" (UID: \"0a1f3c4c-d1e4-4602-9615-2587027c04d2\") " pod="openstack/ceilometer-0" Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.644996 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0a1f3c4c-d1e4-4602-9615-2587027c04d2-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"0a1f3c4c-d1e4-4602-9615-2587027c04d2\") " pod="openstack/ceilometer-0" Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.645033 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0a1f3c4c-d1e4-4602-9615-2587027c04d2-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"0a1f3c4c-d1e4-4602-9615-2587027c04d2\") " pod="openstack/ceilometer-0" Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.646630 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.663779 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-api-0"] Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.665424 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-api-0" Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.668375 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-api-config-data" Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.668674 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-manila-internal-svc" Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.668691 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-manila-public-svc" Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.676298 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-api-0"] Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.736962 4869 scope.go:117] "RemoveContainer" containerID="7b5914c9ff44ea90ca6e9fd54804f35267c6f8c46b223a7f461887c16ed1972f" Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.747229 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jht9n\" (UniqueName: \"kubernetes.io/projected/0a1f3c4c-d1e4-4602-9615-2587027c04d2-kube-api-access-jht9n\") pod \"ceilometer-0\" (UID: \"0a1f3c4c-d1e4-4602-9615-2587027c04d2\") " pod="openstack/ceilometer-0" Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.747292 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6fecded1-d25b-40a1-b7ce-c7819d11f929-internal-tls-certs\") pod \"manila-api-0\" (UID: \"6fecded1-d25b-40a1-b7ce-c7819d11f929\") " pod="openstack/manila-api-0" Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.747330 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0a1f3c4c-d1e4-4602-9615-2587027c04d2-scripts\") pod \"ceilometer-0\" (UID: \"0a1f3c4c-d1e4-4602-9615-2587027c04d2\") " pod="openstack/ceilometer-0" Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.747356 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6fecded1-d25b-40a1-b7ce-c7819d11f929-etc-machine-id\") pod \"manila-api-0\" (UID: \"6fecded1-d25b-40a1-b7ce-c7819d11f929\") " pod="openstack/manila-api-0" Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.747386 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0a1f3c4c-d1e4-4602-9615-2587027c04d2-config-data\") pod \"ceilometer-0\" (UID: \"0a1f3c4c-d1e4-4602-9615-2587027c04d2\") " pod="openstack/ceilometer-0" Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.747405 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0a1f3c4c-d1e4-4602-9615-2587027c04d2-log-httpd\") pod \"ceilometer-0\" (UID: \"0a1f3c4c-d1e4-4602-9615-2587027c04d2\") " pod="openstack/ceilometer-0" Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.747423 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6fecded1-d25b-40a1-b7ce-c7819d11f929-public-tls-certs\") pod \"manila-api-0\" (UID: \"6fecded1-d25b-40a1-b7ce-c7819d11f929\") " pod="openstack/manila-api-0" Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.747460 4869 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wtxj7\" (UniqueName: \"kubernetes.io/projected/6fecded1-d25b-40a1-b7ce-c7819d11f929-kube-api-access-wtxj7\") pod \"manila-api-0\" (UID: \"6fecded1-d25b-40a1-b7ce-c7819d11f929\") " pod="openstack/manila-api-0" Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.747497 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0a1f3c4c-d1e4-4602-9615-2587027c04d2-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"0a1f3c4c-d1e4-4602-9615-2587027c04d2\") " pod="openstack/ceilometer-0" Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.747523 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0a1f3c4c-d1e4-4602-9615-2587027c04d2-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"0a1f3c4c-d1e4-4602-9615-2587027c04d2\") " pod="openstack/ceilometer-0" Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.747555 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6fecded1-d25b-40a1-b7ce-c7819d11f929-config-data\") pod \"manila-api-0\" (UID: \"6fecded1-d25b-40a1-b7ce-c7819d11f929\") " pod="openstack/manila-api-0" Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.747591 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6fecded1-d25b-40a1-b7ce-c7819d11f929-config-data-custom\") pod \"manila-api-0\" (UID: \"6fecded1-d25b-40a1-b7ce-c7819d11f929\") " pod="openstack/manila-api-0" Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.747610 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/0a1f3c4c-d1e4-4602-9615-2587027c04d2-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"0a1f3c4c-d1e4-4602-9615-2587027c04d2\") " pod="openstack/ceilometer-0" Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.747628 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0a1f3c4c-d1e4-4602-9615-2587027c04d2-run-httpd\") pod \"ceilometer-0\" (UID: \"0a1f3c4c-d1e4-4602-9615-2587027c04d2\") " pod="openstack/ceilometer-0" Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.747650 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6fecded1-d25b-40a1-b7ce-c7819d11f929-logs\") pod \"manila-api-0\" (UID: \"6fecded1-d25b-40a1-b7ce-c7819d11f929\") " pod="openstack/manila-api-0" Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.747666 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6fecded1-d25b-40a1-b7ce-c7819d11f929-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"6fecded1-d25b-40a1-b7ce-c7819d11f929\") " pod="openstack/manila-api-0" Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.747682 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6fecded1-d25b-40a1-b7ce-c7819d11f929-scripts\") pod \"manila-api-0\" (UID: \"6fecded1-d25b-40a1-b7ce-c7819d11f929\") " 
pod="openstack/manila-api-0" Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.748185 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0a1f3c4c-d1e4-4602-9615-2587027c04d2-log-httpd\") pod \"ceilometer-0\" (UID: \"0a1f3c4c-d1e4-4602-9615-2587027c04d2\") " pod="openstack/ceilometer-0" Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.748425 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0a1f3c4c-d1e4-4602-9615-2587027c04d2-run-httpd\") pod \"ceilometer-0\" (UID: \"0a1f3c4c-d1e4-4602-9615-2587027c04d2\") " pod="openstack/ceilometer-0" Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.751123 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0a1f3c4c-d1e4-4602-9615-2587027c04d2-config-data\") pod \"ceilometer-0\" (UID: \"0a1f3c4c-d1e4-4602-9615-2587027c04d2\") " pod="openstack/ceilometer-0" Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.753967 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0a1f3c4c-d1e4-4602-9615-2587027c04d2-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"0a1f3c4c-d1e4-4602-9615-2587027c04d2\") " pod="openstack/ceilometer-0" Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.754360 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0a1f3c4c-d1e4-4602-9615-2587027c04d2-scripts\") pod \"ceilometer-0\" (UID: \"0a1f3c4c-d1e4-4602-9615-2587027c04d2\") " pod="openstack/ceilometer-0" Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.757223 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/0a1f3c4c-d1e4-4602-9615-2587027c04d2-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"0a1f3c4c-d1e4-4602-9615-2587027c04d2\") " pod="openstack/ceilometer-0" Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.758419 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0a1f3c4c-d1e4-4602-9615-2587027c04d2-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"0a1f3c4c-d1e4-4602-9615-2587027c04d2\") " pod="openstack/ceilometer-0" Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.763350 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jht9n\" (UniqueName: \"kubernetes.io/projected/0a1f3c4c-d1e4-4602-9615-2587027c04d2-kube-api-access-jht9n\") pod \"ceilometer-0\" (UID: \"0a1f3c4c-d1e4-4602-9615-2587027c04d2\") " pod="openstack/ceilometer-0" Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.850066 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6fecded1-d25b-40a1-b7ce-c7819d11f929-etc-machine-id\") pod \"manila-api-0\" (UID: \"6fecded1-d25b-40a1-b7ce-c7819d11f929\") " pod="openstack/manila-api-0" Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.850521 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6fecded1-d25b-40a1-b7ce-c7819d11f929-public-tls-certs\") pod \"manila-api-0\" (UID: \"6fecded1-d25b-40a1-b7ce-c7819d11f929\") " pod="openstack/manila-api-0" Oct 01 15:55:32 crc 
kubenswrapper[4869]: I1001 15:55:32.850197 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6fecded1-d25b-40a1-b7ce-c7819d11f929-etc-machine-id\") pod \"manila-api-0\" (UID: \"6fecded1-d25b-40a1-b7ce-c7819d11f929\") " pod="openstack/manila-api-0" Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.850568 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wtxj7\" (UniqueName: \"kubernetes.io/projected/6fecded1-d25b-40a1-b7ce-c7819d11f929-kube-api-access-wtxj7\") pod \"manila-api-0\" (UID: \"6fecded1-d25b-40a1-b7ce-c7819d11f929\") " pod="openstack/manila-api-0" Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.850666 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6fecded1-d25b-40a1-b7ce-c7819d11f929-config-data\") pod \"manila-api-0\" (UID: \"6fecded1-d25b-40a1-b7ce-c7819d11f929\") " pod="openstack/manila-api-0" Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.850708 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6fecded1-d25b-40a1-b7ce-c7819d11f929-config-data-custom\") pod \"manila-api-0\" (UID: \"6fecded1-d25b-40a1-b7ce-c7819d11f929\") " pod="openstack/manila-api-0" Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.850736 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6fecded1-d25b-40a1-b7ce-c7819d11f929-logs\") pod \"manila-api-0\" (UID: \"6fecded1-d25b-40a1-b7ce-c7819d11f929\") " pod="openstack/manila-api-0" Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.850750 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6fecded1-d25b-40a1-b7ce-c7819d11f929-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"6fecded1-d25b-40a1-b7ce-c7819d11f929\") " pod="openstack/manila-api-0" Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.850764 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6fecded1-d25b-40a1-b7ce-c7819d11f929-scripts\") pod \"manila-api-0\" (UID: \"6fecded1-d25b-40a1-b7ce-c7819d11f929\") " pod="openstack/manila-api-0" Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.850808 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6fecded1-d25b-40a1-b7ce-c7819d11f929-internal-tls-certs\") pod \"manila-api-0\" (UID: \"6fecded1-d25b-40a1-b7ce-c7819d11f929\") " pod="openstack/manila-api-0" Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.851219 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6fecded1-d25b-40a1-b7ce-c7819d11f929-logs\") pod \"manila-api-0\" (UID: \"6fecded1-d25b-40a1-b7ce-c7819d11f929\") " pod="openstack/manila-api-0" Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.854588 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6fecded1-d25b-40a1-b7ce-c7819d11f929-public-tls-certs\") pod \"manila-api-0\" (UID: \"6fecded1-d25b-40a1-b7ce-c7819d11f929\") " pod="openstack/manila-api-0" Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.856295 
4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6fecded1-d25b-40a1-b7ce-c7819d11f929-internal-tls-certs\") pod \"manila-api-0\" (UID: \"6fecded1-d25b-40a1-b7ce-c7819d11f929\") " pod="openstack/manila-api-0" Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.860643 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6fecded1-d25b-40a1-b7ce-c7819d11f929-config-data-custom\") pod \"manila-api-0\" (UID: \"6fecded1-d25b-40a1-b7ce-c7819d11f929\") " pod="openstack/manila-api-0" Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.861236 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6fecded1-d25b-40a1-b7ce-c7819d11f929-config-data\") pod \"manila-api-0\" (UID: \"6fecded1-d25b-40a1-b7ce-c7819d11f929\") " pod="openstack/manila-api-0" Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.861484 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6fecded1-d25b-40a1-b7ce-c7819d11f929-scripts\") pod \"manila-api-0\" (UID: \"6fecded1-d25b-40a1-b7ce-c7819d11f929\") " pod="openstack/manila-api-0" Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.861923 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6fecded1-d25b-40a1-b7ce-c7819d11f929-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"6fecded1-d25b-40a1-b7ce-c7819d11f929\") " pod="openstack/manila-api-0" Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.885844 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wtxj7\" (UniqueName: \"kubernetes.io/projected/6fecded1-d25b-40a1-b7ce-c7819d11f929-kube-api-access-wtxj7\") pod \"manila-api-0\" (UID: \"6fecded1-d25b-40a1-b7ce-c7819d11f929\") " pod="openstack/manila-api-0" Oct 01 15:55:32 crc kubenswrapper[4869]: I1001 15:55:32.915890 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 01 15:55:33 crc kubenswrapper[4869]: I1001 15:55:33.033103 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-api-0" Oct 01 15:55:33 crc kubenswrapper[4869]: I1001 15:55:33.374783 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 01 15:55:33 crc kubenswrapper[4869]: W1001 15:55:33.386378 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0a1f3c4c_d1e4_4602_9615_2587027c04d2.slice/crio-56aec8a2df85739fe9e28d66f2f6a8f8606ea3bee48f0bdca2bf2f3635e6e9fd WatchSource:0}: Error finding container 56aec8a2df85739fe9e28d66f2f6a8f8606ea3bee48f0bdca2bf2f3635e6e9fd: Status 404 returned error can't find the container with id 56aec8a2df85739fe9e28d66f2f6a8f8606ea3bee48f0bdca2bf2f3635e6e9fd Oct 01 15:55:33 crc kubenswrapper[4869]: I1001 15:55:33.495048 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"d413715e-3ed1-4ea2-8b2f-0a56e1c2677e","Type":"ContainerStarted","Data":"605a92c850e67049b917baa78be2cf05b4219bf2115ce553d543d6d7172ff3c3"} Oct 01 15:55:33 crc kubenswrapper[4869]: I1001 15:55:33.496006 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0a1f3c4c-d1e4-4602-9615-2587027c04d2","Type":"ContainerStarted","Data":"56aec8a2df85739fe9e28d66f2f6a8f8606ea3bee48f0bdca2bf2f3635e6e9fd"} Oct 01 15:55:33 crc kubenswrapper[4869]: I1001 15:55:33.518549 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-share-share1-0" podStartSLOduration=3.334949269 podStartE2EDuration="10.518526652s" podCreationTimestamp="2025-10-01 15:55:23 +0000 UTC" firstStartedPulling="2025-10-01 15:55:24.755165814 +0000 UTC m=+3033.902008940" lastFinishedPulling="2025-10-01 15:55:31.938743207 +0000 UTC m=+3041.085586323" observedRunningTime="2025-10-01 15:55:33.512659624 +0000 UTC m=+3042.659502740" watchObservedRunningTime="2025-10-01 15:55:33.518526652 +0000 UTC m=+3042.665369768" Oct 01 15:55:33 crc kubenswrapper[4869]: I1001 15:55:33.581050 4869 scope.go:117] "RemoveContainer" containerID="ea87da209cc47118cc41bdb3107264d5dd2e07bc832e620553f0ac1be9456693" Oct 01 15:55:33 crc kubenswrapper[4869]: E1001 15:55:33.581365 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 15:55:33 crc kubenswrapper[4869]: I1001 15:55:33.593456 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="618b4066-6eed-43e5-97c9-b8551da111df" path="/var/lib/kubelet/pods/618b4066-6eed-43e5-97c9-b8551da111df/volumes" Oct 01 15:55:33 crc kubenswrapper[4869]: I1001 15:55:33.594493 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a85f4a7e-3c28-4c54-be1f-723b7bca17cb" path="/var/lib/kubelet/pods/a85f4a7e-3c28-4c54-be1f-723b7bca17cb/volumes" Oct 01 15:55:33 crc kubenswrapper[4869]: W1001 15:55:33.637155 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6fecded1_d25b_40a1_b7ce_c7819d11f929.slice/crio-f60c5a51ff007fbcf9d2fed78157c79a238f939511b85341c0039be78b0a9d75 WatchSource:0}: Error finding container 
f60c5a51ff007fbcf9d2fed78157c79a238f939511b85341c0039be78b0a9d75: Status 404 returned error can't find the container with id f60c5a51ff007fbcf9d2fed78157c79a238f939511b85341c0039be78b0a9d75 Oct 01 15:55:33 crc kubenswrapper[4869]: I1001 15:55:33.640595 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-api-0"] Oct 01 15:55:34 crc kubenswrapper[4869]: I1001 15:55:34.072478 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/manila-share-share1-0" Oct 01 15:55:34 crc kubenswrapper[4869]: I1001 15:55:34.090541 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/manila-scheduler-0" Oct 01 15:55:34 crc kubenswrapper[4869]: I1001 15:55:34.213473 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5b88556f9c-4f47f" Oct 01 15:55:34 crc kubenswrapper[4869]: I1001 15:55:34.279472 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7bf874d8bf-rkktm"] Oct 01 15:55:34 crc kubenswrapper[4869]: I1001 15:55:34.280029 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7bf874d8bf-rkktm" podUID="2cc0314c-8edd-431b-a31a-3a2355225d9a" containerName="dnsmasq-dns" containerID="cri-o://c2c55fa28ea9e8a075a1d20574098592b5ec9e0bf1e20177b10b853ba95acb98" gracePeriod=10 Oct 01 15:55:34 crc kubenswrapper[4869]: I1001 15:55:34.509763 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"6fecded1-d25b-40a1-b7ce-c7819d11f929","Type":"ContainerStarted","Data":"410cfc896d966ecf68bcf0a0aed1663c9ba4e841dd9319795ccb844ebcf03dfc"} Oct 01 15:55:34 crc kubenswrapper[4869]: I1001 15:55:34.509810 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"6fecded1-d25b-40a1-b7ce-c7819d11f929","Type":"ContainerStarted","Data":"f60c5a51ff007fbcf9d2fed78157c79a238f939511b85341c0039be78b0a9d75"} Oct 01 15:55:34 crc kubenswrapper[4869]: I1001 15:55:34.511771 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0a1f3c4c-d1e4-4602-9615-2587027c04d2","Type":"ContainerStarted","Data":"09100d9787d06b119729e2df3b364bb39d75ea1af3598b7d72f71cc344c86fa4"} Oct 01 15:55:34 crc kubenswrapper[4869]: I1001 15:55:34.519178 4869 generic.go:334] "Generic (PLEG): container finished" podID="2cc0314c-8edd-431b-a31a-3a2355225d9a" containerID="c2c55fa28ea9e8a075a1d20574098592b5ec9e0bf1e20177b10b853ba95acb98" exitCode=0 Oct 01 15:55:34 crc kubenswrapper[4869]: I1001 15:55:34.519328 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7bf874d8bf-rkktm" event={"ID":"2cc0314c-8edd-431b-a31a-3a2355225d9a","Type":"ContainerDied","Data":"c2c55fa28ea9e8a075a1d20574098592b5ec9e0bf1e20177b10b853ba95acb98"} Oct 01 15:55:34 crc kubenswrapper[4869]: I1001 15:55:34.791955 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7bf874d8bf-rkktm" Oct 01 15:55:34 crc kubenswrapper[4869]: I1001 15:55:34.899441 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2cc0314c-8edd-431b-a31a-3a2355225d9a-config\") pod \"2cc0314c-8edd-431b-a31a-3a2355225d9a\" (UID: \"2cc0314c-8edd-431b-a31a-3a2355225d9a\") " Oct 01 15:55:34 crc kubenswrapper[4869]: I1001 15:55:34.899504 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j557c\" (UniqueName: \"kubernetes.io/projected/2cc0314c-8edd-431b-a31a-3a2355225d9a-kube-api-access-j557c\") pod \"2cc0314c-8edd-431b-a31a-3a2355225d9a\" (UID: \"2cc0314c-8edd-431b-a31a-3a2355225d9a\") " Oct 01 15:55:34 crc kubenswrapper[4869]: I1001 15:55:34.899555 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2cc0314c-8edd-431b-a31a-3a2355225d9a-ovsdbserver-sb\") pod \"2cc0314c-8edd-431b-a31a-3a2355225d9a\" (UID: \"2cc0314c-8edd-431b-a31a-3a2355225d9a\") " Oct 01 15:55:34 crc kubenswrapper[4869]: I1001 15:55:34.899607 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/2cc0314c-8edd-431b-a31a-3a2355225d9a-openstack-edpm-ipam\") pod \"2cc0314c-8edd-431b-a31a-3a2355225d9a\" (UID: \"2cc0314c-8edd-431b-a31a-3a2355225d9a\") " Oct 01 15:55:34 crc kubenswrapper[4869]: I1001 15:55:34.899675 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2cc0314c-8edd-431b-a31a-3a2355225d9a-dns-svc\") pod \"2cc0314c-8edd-431b-a31a-3a2355225d9a\" (UID: \"2cc0314c-8edd-431b-a31a-3a2355225d9a\") " Oct 01 15:55:34 crc kubenswrapper[4869]: I1001 15:55:34.899709 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2cc0314c-8edd-431b-a31a-3a2355225d9a-ovsdbserver-nb\") pod \"2cc0314c-8edd-431b-a31a-3a2355225d9a\" (UID: \"2cc0314c-8edd-431b-a31a-3a2355225d9a\") " Oct 01 15:55:34 crc kubenswrapper[4869]: I1001 15:55:34.905469 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2cc0314c-8edd-431b-a31a-3a2355225d9a-kube-api-access-j557c" (OuterVolumeSpecName: "kube-api-access-j557c") pod "2cc0314c-8edd-431b-a31a-3a2355225d9a" (UID: "2cc0314c-8edd-431b-a31a-3a2355225d9a"). InnerVolumeSpecName "kube-api-access-j557c". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:55:34 crc kubenswrapper[4869]: I1001 15:55:34.947987 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2cc0314c-8edd-431b-a31a-3a2355225d9a-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "2cc0314c-8edd-431b-a31a-3a2355225d9a" (UID: "2cc0314c-8edd-431b-a31a-3a2355225d9a"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:55:34 crc kubenswrapper[4869]: I1001 15:55:34.952016 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2cc0314c-8edd-431b-a31a-3a2355225d9a-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "2cc0314c-8edd-431b-a31a-3a2355225d9a" (UID: "2cc0314c-8edd-431b-a31a-3a2355225d9a"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:55:34 crc kubenswrapper[4869]: I1001 15:55:34.952879 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2cc0314c-8edd-431b-a31a-3a2355225d9a-config" (OuterVolumeSpecName: "config") pod "2cc0314c-8edd-431b-a31a-3a2355225d9a" (UID: "2cc0314c-8edd-431b-a31a-3a2355225d9a"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:55:34 crc kubenswrapper[4869]: I1001 15:55:34.954439 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2cc0314c-8edd-431b-a31a-3a2355225d9a-openstack-edpm-ipam" (OuterVolumeSpecName: "openstack-edpm-ipam") pod "2cc0314c-8edd-431b-a31a-3a2355225d9a" (UID: "2cc0314c-8edd-431b-a31a-3a2355225d9a"). InnerVolumeSpecName "openstack-edpm-ipam". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:55:34 crc kubenswrapper[4869]: I1001 15:55:34.975913 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2cc0314c-8edd-431b-a31a-3a2355225d9a-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "2cc0314c-8edd-431b-a31a-3a2355225d9a" (UID: "2cc0314c-8edd-431b-a31a-3a2355225d9a"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 15:55:35 crc kubenswrapper[4869]: I1001 15:55:35.002567 4869 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2cc0314c-8edd-431b-a31a-3a2355225d9a-config\") on node \"crc\" DevicePath \"\"" Oct 01 15:55:35 crc kubenswrapper[4869]: I1001 15:55:35.002598 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j557c\" (UniqueName: \"kubernetes.io/projected/2cc0314c-8edd-431b-a31a-3a2355225d9a-kube-api-access-j557c\") on node \"crc\" DevicePath \"\"" Oct 01 15:55:35 crc kubenswrapper[4869]: I1001 15:55:35.002609 4869 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2cc0314c-8edd-431b-a31a-3a2355225d9a-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 01 15:55:35 crc kubenswrapper[4869]: I1001 15:55:35.002619 4869 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/2cc0314c-8edd-431b-a31a-3a2355225d9a-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Oct 01 15:55:35 crc kubenswrapper[4869]: I1001 15:55:35.002627 4869 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2cc0314c-8edd-431b-a31a-3a2355225d9a-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 01 15:55:35 crc kubenswrapper[4869]: I1001 15:55:35.002638 4869 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2cc0314c-8edd-431b-a31a-3a2355225d9a-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 01 15:55:35 crc kubenswrapper[4869]: I1001 15:55:35.533246 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7bf874d8bf-rkktm" event={"ID":"2cc0314c-8edd-431b-a31a-3a2355225d9a","Type":"ContainerDied","Data":"ff41c40af4a73d155ef781b7e581143bcbb70a222310fca3437e85e698151452"} Oct 01 15:55:35 crc kubenswrapper[4869]: I1001 15:55:35.533570 4869 scope.go:117] "RemoveContainer" containerID="c2c55fa28ea9e8a075a1d20574098592b5ec9e0bf1e20177b10b853ba95acb98" Oct 01 15:55:35 crc kubenswrapper[4869]: I1001 15:55:35.533682 4869 util.go:48] "No ready sandbox for pod can 
be found. Need to start a new one" pod="openstack/dnsmasq-dns-7bf874d8bf-rkktm" Oct 01 15:55:35 crc kubenswrapper[4869]: I1001 15:55:35.539124 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"6fecded1-d25b-40a1-b7ce-c7819d11f929","Type":"ContainerStarted","Data":"c652ac161875d7580735dfce8ca8b5568692483925245c829004531aeddc6499"} Oct 01 15:55:35 crc kubenswrapper[4869]: I1001 15:55:35.539181 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/manila-api-0" Oct 01 15:55:35 crc kubenswrapper[4869]: I1001 15:55:35.574132 4869 scope.go:117] "RemoveContainer" containerID="6f072b717d909b6290eb6a50be0e8b7870d72a18290c8ab18b68b7165afd3d6b" Oct 01 15:55:35 crc kubenswrapper[4869]: I1001 15:55:35.581748 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-api-0" podStartSLOduration=3.581723701 podStartE2EDuration="3.581723701s" podCreationTimestamp="2025-10-01 15:55:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:55:35.56782116 +0000 UTC m=+3044.714664286" watchObservedRunningTime="2025-10-01 15:55:35.581723701 +0000 UTC m=+3044.728566837" Oct 01 15:55:35 crc kubenswrapper[4869]: I1001 15:55:35.630914 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7bf874d8bf-rkktm"] Oct 01 15:55:35 crc kubenswrapper[4869]: I1001 15:55:35.644228 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7bf874d8bf-rkktm"] Oct 01 15:55:36 crc kubenswrapper[4869]: E1001 15:55:36.444497 4869 log.go:32] "PullImage from image service failed" err="rpc error: code = Unknown desc = initializing source docker://quay.io/openstack-k8s-operators/sg-core@sha256:09b5017c95d7697e66b9c64846bc48ef5826a009cba89b956ec54561e5f4a2d1: can't talk to a V1 container registry" image="quay.io/openstack-k8s-operators/sg-core@sha256:09b5017c95d7697e66b9c64846bc48ef5826a009cba89b956ec54561e5f4a2d1" Oct 01 15:55:36 crc kubenswrapper[4869]: E1001 15:55:36.444785 4869 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:sg-core,Image:quay.io/openstack-k8s-operators/sg-core@sha256:09b5017c95d7697e66b9c64846bc48ef5826a009cba89b956ec54561e5f4a2d1,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:sg-core-conf-yaml,ReadOnly:false,MountPath:/etc/sg-core.conf.yaml,SubPath:sg-core.conf.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-jht9n,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod 
ceilometer-0_openstack(0a1f3c4c-d1e4-4602-9615-2587027c04d2): ErrImagePull: initializing source docker://quay.io/openstack-k8s-operators/sg-core@sha256:09b5017c95d7697e66b9c64846bc48ef5826a009cba89b956ec54561e5f4a2d1: can't talk to a V1 container registry" logger="UnhandledError" Oct 01 15:55:36 crc kubenswrapper[4869]: I1001 15:55:36.549822 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0a1f3c4c-d1e4-4602-9615-2587027c04d2","Type":"ContainerStarted","Data":"219d0ec72dc070a6cdd0c532ee91c9ab9293a02e96dd395871688a1cde2f1372"} Oct 01 15:55:37 crc kubenswrapper[4869]: I1001 15:55:37.593682 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2cc0314c-8edd-431b-a31a-3a2355225d9a" path="/var/lib/kubelet/pods/2cc0314c-8edd-431b-a31a-3a2355225d9a/volumes" Oct 01 15:55:38 crc kubenswrapper[4869]: I1001 15:55:38.979406 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 01 15:55:39 crc kubenswrapper[4869]: E1001 15:55:39.055932 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"sg-core\" with ErrImagePull: \"initializing source docker://quay.io/openstack-k8s-operators/sg-core@sha256:09b5017c95d7697e66b9c64846bc48ef5826a009cba89b956ec54561e5f4a2d1: can't talk to a V1 container registry\"" pod="openstack/ceilometer-0" podUID="0a1f3c4c-d1e4-4602-9615-2587027c04d2" Oct 01 15:55:39 crc kubenswrapper[4869]: I1001 15:55:39.619400 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="0a1f3c4c-d1e4-4602-9615-2587027c04d2" containerName="ceilometer-central-agent" containerID="cri-o://09100d9787d06b119729e2df3b364bb39d75ea1af3598b7d72f71cc344c86fa4" gracePeriod=30 Oct 01 15:55:39 crc kubenswrapper[4869]: I1001 15:55:39.619489 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="0a1f3c4c-d1e4-4602-9615-2587027c04d2" containerName="proxy-httpd" containerID="cri-o://46cb5379dba7094fc3ba7a09409617a963186ea549e4d310c5db7e574ea03ce4" gracePeriod=30 Oct 01 15:55:39 crc kubenswrapper[4869]: I1001 15:55:39.619492 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="0a1f3c4c-d1e4-4602-9615-2587027c04d2" containerName="ceilometer-notification-agent" containerID="cri-o://219d0ec72dc070a6cdd0c532ee91c9ab9293a02e96dd395871688a1cde2f1372" gracePeriod=30 Oct 01 15:55:39 crc kubenswrapper[4869]: I1001 15:55:39.622039 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0a1f3c4c-d1e4-4602-9615-2587027c04d2","Type":"ContainerStarted","Data":"46cb5379dba7094fc3ba7a09409617a963186ea549e4d310c5db7e574ea03ce4"} Oct 01 15:55:39 crc kubenswrapper[4869]: I1001 15:55:39.622116 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 01 15:55:40 crc kubenswrapper[4869]: I1001 15:55:40.640248 4869 generic.go:334] "Generic (PLEG): container finished" podID="0a1f3c4c-d1e4-4602-9615-2587027c04d2" containerID="46cb5379dba7094fc3ba7a09409617a963186ea549e4d310c5db7e574ea03ce4" exitCode=0 Oct 01 15:55:40 crc kubenswrapper[4869]: I1001 15:55:40.640513 4869 generic.go:334] "Generic (PLEG): container finished" podID="0a1f3c4c-d1e4-4602-9615-2587027c04d2" containerID="219d0ec72dc070a6cdd0c532ee91c9ab9293a02e96dd395871688a1cde2f1372" exitCode=0 Oct 01 15:55:40 crc kubenswrapper[4869]: I1001 15:55:40.640299 4869 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0a1f3c4c-d1e4-4602-9615-2587027c04d2","Type":"ContainerDied","Data":"46cb5379dba7094fc3ba7a09409617a963186ea549e4d310c5db7e574ea03ce4"} Oct 01 15:55:40 crc kubenswrapper[4869]: I1001 15:55:40.640552 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0a1f3c4c-d1e4-4602-9615-2587027c04d2","Type":"ContainerDied","Data":"219d0ec72dc070a6cdd0c532ee91c9ab9293a02e96dd395871688a1cde2f1372"} Oct 01 15:55:40 crc kubenswrapper[4869]: I1001 15:55:40.640562 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0a1f3c4c-d1e4-4602-9615-2587027c04d2","Type":"ContainerDied","Data":"09100d9787d06b119729e2df3b364bb39d75ea1af3598b7d72f71cc344c86fa4"} Oct 01 15:55:40 crc kubenswrapper[4869]: I1001 15:55:40.640523 4869 generic.go:334] "Generic (PLEG): container finished" podID="0a1f3c4c-d1e4-4602-9615-2587027c04d2" containerID="09100d9787d06b119729e2df3b364bb39d75ea1af3598b7d72f71cc344c86fa4" exitCode=0 Oct 01 15:55:41 crc kubenswrapper[4869]: I1001 15:55:41.129600 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 01 15:55:41 crc kubenswrapper[4869]: I1001 15:55:41.234698 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/0a1f3c4c-d1e4-4602-9615-2587027c04d2-ceilometer-tls-certs\") pod \"0a1f3c4c-d1e4-4602-9615-2587027c04d2\" (UID: \"0a1f3c4c-d1e4-4602-9615-2587027c04d2\") " Oct 01 15:55:41 crc kubenswrapper[4869]: I1001 15:55:41.234782 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0a1f3c4c-d1e4-4602-9615-2587027c04d2-scripts\") pod \"0a1f3c4c-d1e4-4602-9615-2587027c04d2\" (UID: \"0a1f3c4c-d1e4-4602-9615-2587027c04d2\") " Oct 01 15:55:41 crc kubenswrapper[4869]: I1001 15:55:41.234867 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0a1f3c4c-d1e4-4602-9615-2587027c04d2-combined-ca-bundle\") pod \"0a1f3c4c-d1e4-4602-9615-2587027c04d2\" (UID: \"0a1f3c4c-d1e4-4602-9615-2587027c04d2\") " Oct 01 15:55:41 crc kubenswrapper[4869]: I1001 15:55:41.234917 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0a1f3c4c-d1e4-4602-9615-2587027c04d2-run-httpd\") pod \"0a1f3c4c-d1e4-4602-9615-2587027c04d2\" (UID: \"0a1f3c4c-d1e4-4602-9615-2587027c04d2\") " Oct 01 15:55:41 crc kubenswrapper[4869]: I1001 15:55:41.234976 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jht9n\" (UniqueName: \"kubernetes.io/projected/0a1f3c4c-d1e4-4602-9615-2587027c04d2-kube-api-access-jht9n\") pod \"0a1f3c4c-d1e4-4602-9615-2587027c04d2\" (UID: \"0a1f3c4c-d1e4-4602-9615-2587027c04d2\") " Oct 01 15:55:41 crc kubenswrapper[4869]: I1001 15:55:41.235000 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0a1f3c4c-d1e4-4602-9615-2587027c04d2-config-data\") pod \"0a1f3c4c-d1e4-4602-9615-2587027c04d2\" (UID: \"0a1f3c4c-d1e4-4602-9615-2587027c04d2\") " Oct 01 15:55:41 crc kubenswrapper[4869]: I1001 15:55:41.235060 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/0a1f3c4c-d1e4-4602-9615-2587027c04d2-log-httpd\") pod \"0a1f3c4c-d1e4-4602-9615-2587027c04d2\" (UID: \"0a1f3c4c-d1e4-4602-9615-2587027c04d2\") " Oct 01 15:55:41 crc kubenswrapper[4869]: I1001 15:55:41.235091 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0a1f3c4c-d1e4-4602-9615-2587027c04d2-sg-core-conf-yaml\") pod \"0a1f3c4c-d1e4-4602-9615-2587027c04d2\" (UID: \"0a1f3c4c-d1e4-4602-9615-2587027c04d2\") " Oct 01 15:55:41 crc kubenswrapper[4869]: I1001 15:55:41.235895 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0a1f3c4c-d1e4-4602-9615-2587027c04d2-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "0a1f3c4c-d1e4-4602-9615-2587027c04d2" (UID: "0a1f3c4c-d1e4-4602-9615-2587027c04d2"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 15:55:41 crc kubenswrapper[4869]: I1001 15:55:41.236775 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0a1f3c4c-d1e4-4602-9615-2587027c04d2-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "0a1f3c4c-d1e4-4602-9615-2587027c04d2" (UID: "0a1f3c4c-d1e4-4602-9615-2587027c04d2"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 15:55:41 crc kubenswrapper[4869]: I1001 15:55:41.241787 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0a1f3c4c-d1e4-4602-9615-2587027c04d2-kube-api-access-jht9n" (OuterVolumeSpecName: "kube-api-access-jht9n") pod "0a1f3c4c-d1e4-4602-9615-2587027c04d2" (UID: "0a1f3c4c-d1e4-4602-9615-2587027c04d2"). InnerVolumeSpecName "kube-api-access-jht9n". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:55:41 crc kubenswrapper[4869]: I1001 15:55:41.246581 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0a1f3c4c-d1e4-4602-9615-2587027c04d2-scripts" (OuterVolumeSpecName: "scripts") pod "0a1f3c4c-d1e4-4602-9615-2587027c04d2" (UID: "0a1f3c4c-d1e4-4602-9615-2587027c04d2"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:55:41 crc kubenswrapper[4869]: I1001 15:55:41.246927 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0a1f3c4c-d1e4-4602-9615-2587027c04d2-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "0a1f3c4c-d1e4-4602-9615-2587027c04d2" (UID: "0a1f3c4c-d1e4-4602-9615-2587027c04d2"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:55:41 crc kubenswrapper[4869]: I1001 15:55:41.323603 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0a1f3c4c-d1e4-4602-9615-2587027c04d2-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "0a1f3c4c-d1e4-4602-9615-2587027c04d2" (UID: "0a1f3c4c-d1e4-4602-9615-2587027c04d2"). InnerVolumeSpecName "ceilometer-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:55:41 crc kubenswrapper[4869]: I1001 15:55:41.338309 4869 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0a1f3c4c-d1e4-4602-9615-2587027c04d2-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 01 15:55:41 crc kubenswrapper[4869]: I1001 15:55:41.338381 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jht9n\" (UniqueName: \"kubernetes.io/projected/0a1f3c4c-d1e4-4602-9615-2587027c04d2-kube-api-access-jht9n\") on node \"crc\" DevicePath \"\"" Oct 01 15:55:41 crc kubenswrapper[4869]: I1001 15:55:41.338410 4869 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0a1f3c4c-d1e4-4602-9615-2587027c04d2-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 01 15:55:41 crc kubenswrapper[4869]: I1001 15:55:41.338426 4869 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0a1f3c4c-d1e4-4602-9615-2587027c04d2-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 01 15:55:41 crc kubenswrapper[4869]: I1001 15:55:41.338446 4869 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/0a1f3c4c-d1e4-4602-9615-2587027c04d2-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 01 15:55:41 crc kubenswrapper[4869]: I1001 15:55:41.338470 4869 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0a1f3c4c-d1e4-4602-9615-2587027c04d2-scripts\") on node \"crc\" DevicePath \"\"" Oct 01 15:55:41 crc kubenswrapper[4869]: I1001 15:55:41.363319 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0a1f3c4c-d1e4-4602-9615-2587027c04d2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0a1f3c4c-d1e4-4602-9615-2587027c04d2" (UID: "0a1f3c4c-d1e4-4602-9615-2587027c04d2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:55:41 crc kubenswrapper[4869]: I1001 15:55:41.397881 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0a1f3c4c-d1e4-4602-9615-2587027c04d2-config-data" (OuterVolumeSpecName: "config-data") pod "0a1f3c4c-d1e4-4602-9615-2587027c04d2" (UID: "0a1f3c4c-d1e4-4602-9615-2587027c04d2"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:55:41 crc kubenswrapper[4869]: I1001 15:55:41.440156 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0a1f3c4c-d1e4-4602-9615-2587027c04d2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 15:55:41 crc kubenswrapper[4869]: I1001 15:55:41.440202 4869 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0a1f3c4c-d1e4-4602-9615-2587027c04d2-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 15:55:41 crc kubenswrapper[4869]: I1001 15:55:41.653767 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0a1f3c4c-d1e4-4602-9615-2587027c04d2","Type":"ContainerDied","Data":"56aec8a2df85739fe9e28d66f2f6a8f8606ea3bee48f0bdca2bf2f3635e6e9fd"} Oct 01 15:55:41 crc kubenswrapper[4869]: I1001 15:55:41.653836 4869 scope.go:117] "RemoveContainer" containerID="46cb5379dba7094fc3ba7a09409617a963186ea549e4d310c5db7e574ea03ce4" Oct 01 15:55:41 crc kubenswrapper[4869]: I1001 15:55:41.653950 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 01 15:55:41 crc kubenswrapper[4869]: I1001 15:55:41.685960 4869 scope.go:117] "RemoveContainer" containerID="219d0ec72dc070a6cdd0c532ee91c9ab9293a02e96dd395871688a1cde2f1372" Oct 01 15:55:41 crc kubenswrapper[4869]: I1001 15:55:41.726337 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 01 15:55:41 crc kubenswrapper[4869]: I1001 15:55:41.746003 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 01 15:55:41 crc kubenswrapper[4869]: I1001 15:55:41.754508 4869 scope.go:117] "RemoveContainer" containerID="09100d9787d06b119729e2df3b364bb39d75ea1af3598b7d72f71cc344c86fa4" Oct 01 15:55:41 crc kubenswrapper[4869]: I1001 15:55:41.765736 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 01 15:55:41 crc kubenswrapper[4869]: E1001 15:55:41.766470 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0a1f3c4c-d1e4-4602-9615-2587027c04d2" containerName="proxy-httpd" Oct 01 15:55:41 crc kubenswrapper[4869]: I1001 15:55:41.766488 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="0a1f3c4c-d1e4-4602-9615-2587027c04d2" containerName="proxy-httpd" Oct 01 15:55:41 crc kubenswrapper[4869]: E1001 15:55:41.766506 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2cc0314c-8edd-431b-a31a-3a2355225d9a" containerName="init" Oct 01 15:55:41 crc kubenswrapper[4869]: I1001 15:55:41.766514 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="2cc0314c-8edd-431b-a31a-3a2355225d9a" containerName="init" Oct 01 15:55:41 crc kubenswrapper[4869]: E1001 15:55:41.766532 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0a1f3c4c-d1e4-4602-9615-2587027c04d2" containerName="ceilometer-notification-agent" Oct 01 15:55:41 crc kubenswrapper[4869]: I1001 15:55:41.766540 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="0a1f3c4c-d1e4-4602-9615-2587027c04d2" containerName="ceilometer-notification-agent" Oct 01 15:55:41 crc kubenswrapper[4869]: E1001 15:55:41.766549 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2cc0314c-8edd-431b-a31a-3a2355225d9a" containerName="dnsmasq-dns" Oct 01 15:55:41 crc kubenswrapper[4869]: I1001 15:55:41.766556 4869 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="2cc0314c-8edd-431b-a31a-3a2355225d9a" containerName="dnsmasq-dns" Oct 01 15:55:41 crc kubenswrapper[4869]: E1001 15:55:41.766598 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0a1f3c4c-d1e4-4602-9615-2587027c04d2" containerName="ceilometer-central-agent" Oct 01 15:55:41 crc kubenswrapper[4869]: I1001 15:55:41.766606 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="0a1f3c4c-d1e4-4602-9615-2587027c04d2" containerName="ceilometer-central-agent" Oct 01 15:55:41 crc kubenswrapper[4869]: I1001 15:55:41.766842 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="2cc0314c-8edd-431b-a31a-3a2355225d9a" containerName="dnsmasq-dns" Oct 01 15:55:41 crc kubenswrapper[4869]: I1001 15:55:41.766869 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="0a1f3c4c-d1e4-4602-9615-2587027c04d2" containerName="proxy-httpd" Oct 01 15:55:41 crc kubenswrapper[4869]: I1001 15:55:41.766889 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="0a1f3c4c-d1e4-4602-9615-2587027c04d2" containerName="ceilometer-central-agent" Oct 01 15:55:41 crc kubenswrapper[4869]: I1001 15:55:41.766908 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="0a1f3c4c-d1e4-4602-9615-2587027c04d2" containerName="ceilometer-notification-agent" Oct 01 15:55:41 crc kubenswrapper[4869]: I1001 15:55:41.770348 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 01 15:55:41 crc kubenswrapper[4869]: I1001 15:55:41.774165 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Oct 01 15:55:41 crc kubenswrapper[4869]: I1001 15:55:41.775012 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 01 15:55:41 crc kubenswrapper[4869]: I1001 15:55:41.774638 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 01 15:55:41 crc kubenswrapper[4869]: I1001 15:55:41.793292 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 01 15:55:41 crc kubenswrapper[4869]: I1001 15:55:41.847584 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5qwn8\" (UniqueName: \"kubernetes.io/projected/0458e249-c518-4c3a-83d7-dda2beb25763-kube-api-access-5qwn8\") pod \"ceilometer-0\" (UID: \"0458e249-c518-4c3a-83d7-dda2beb25763\") " pod="openstack/ceilometer-0" Oct 01 15:55:41 crc kubenswrapper[4869]: I1001 15:55:41.848392 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0458e249-c518-4c3a-83d7-dda2beb25763-run-httpd\") pod \"ceilometer-0\" (UID: \"0458e249-c518-4c3a-83d7-dda2beb25763\") " pod="openstack/ceilometer-0" Oct 01 15:55:41 crc kubenswrapper[4869]: I1001 15:55:41.848458 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0458e249-c518-4c3a-83d7-dda2beb25763-scripts\") pod \"ceilometer-0\" (UID: \"0458e249-c518-4c3a-83d7-dda2beb25763\") " pod="openstack/ceilometer-0" Oct 01 15:55:41 crc kubenswrapper[4869]: I1001 15:55:41.848495 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0458e249-c518-4c3a-83d7-dda2beb25763-combined-ca-bundle\") pod \"ceilometer-0\" 
(UID: \"0458e249-c518-4c3a-83d7-dda2beb25763\") " pod="openstack/ceilometer-0" Oct 01 15:55:41 crc kubenswrapper[4869]: I1001 15:55:41.848515 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0458e249-c518-4c3a-83d7-dda2beb25763-config-data\") pod \"ceilometer-0\" (UID: \"0458e249-c518-4c3a-83d7-dda2beb25763\") " pod="openstack/ceilometer-0" Oct 01 15:55:41 crc kubenswrapper[4869]: I1001 15:55:41.848531 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0458e249-c518-4c3a-83d7-dda2beb25763-log-httpd\") pod \"ceilometer-0\" (UID: \"0458e249-c518-4c3a-83d7-dda2beb25763\") " pod="openstack/ceilometer-0" Oct 01 15:55:41 crc kubenswrapper[4869]: I1001 15:55:41.848556 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0458e249-c518-4c3a-83d7-dda2beb25763-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"0458e249-c518-4c3a-83d7-dda2beb25763\") " pod="openstack/ceilometer-0" Oct 01 15:55:41 crc kubenswrapper[4869]: I1001 15:55:41.848575 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/0458e249-c518-4c3a-83d7-dda2beb25763-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"0458e249-c518-4c3a-83d7-dda2beb25763\") " pod="openstack/ceilometer-0" Oct 01 15:55:41 crc kubenswrapper[4869]: I1001 15:55:41.950437 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0458e249-c518-4c3a-83d7-dda2beb25763-scripts\") pod \"ceilometer-0\" (UID: \"0458e249-c518-4c3a-83d7-dda2beb25763\") " pod="openstack/ceilometer-0" Oct 01 15:55:41 crc kubenswrapper[4869]: I1001 15:55:41.950549 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0458e249-c518-4c3a-83d7-dda2beb25763-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"0458e249-c518-4c3a-83d7-dda2beb25763\") " pod="openstack/ceilometer-0" Oct 01 15:55:41 crc kubenswrapper[4869]: I1001 15:55:41.950749 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0458e249-c518-4c3a-83d7-dda2beb25763-config-data\") pod \"ceilometer-0\" (UID: \"0458e249-c518-4c3a-83d7-dda2beb25763\") " pod="openstack/ceilometer-0" Oct 01 15:55:41 crc kubenswrapper[4869]: I1001 15:55:41.950781 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0458e249-c518-4c3a-83d7-dda2beb25763-log-httpd\") pod \"ceilometer-0\" (UID: \"0458e249-c518-4c3a-83d7-dda2beb25763\") " pod="openstack/ceilometer-0" Oct 01 15:55:41 crc kubenswrapper[4869]: I1001 15:55:41.950849 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0458e249-c518-4c3a-83d7-dda2beb25763-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"0458e249-c518-4c3a-83d7-dda2beb25763\") " pod="openstack/ceilometer-0" Oct 01 15:55:41 crc kubenswrapper[4869]: I1001 15:55:41.950903 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/0458e249-c518-4c3a-83d7-dda2beb25763-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"0458e249-c518-4c3a-83d7-dda2beb25763\") " pod="openstack/ceilometer-0" Oct 01 15:55:41 crc kubenswrapper[4869]: I1001 15:55:41.951236 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5qwn8\" (UniqueName: \"kubernetes.io/projected/0458e249-c518-4c3a-83d7-dda2beb25763-kube-api-access-5qwn8\") pod \"ceilometer-0\" (UID: \"0458e249-c518-4c3a-83d7-dda2beb25763\") " pod="openstack/ceilometer-0" Oct 01 15:55:41 crc kubenswrapper[4869]: I1001 15:55:41.951353 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0458e249-c518-4c3a-83d7-dda2beb25763-run-httpd\") pod \"ceilometer-0\" (UID: \"0458e249-c518-4c3a-83d7-dda2beb25763\") " pod="openstack/ceilometer-0" Oct 01 15:55:41 crc kubenswrapper[4869]: I1001 15:55:41.952024 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0458e249-c518-4c3a-83d7-dda2beb25763-run-httpd\") pod \"ceilometer-0\" (UID: \"0458e249-c518-4c3a-83d7-dda2beb25763\") " pod="openstack/ceilometer-0" Oct 01 15:55:41 crc kubenswrapper[4869]: I1001 15:55:41.952339 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0458e249-c518-4c3a-83d7-dda2beb25763-log-httpd\") pod \"ceilometer-0\" (UID: \"0458e249-c518-4c3a-83d7-dda2beb25763\") " pod="openstack/ceilometer-0" Oct 01 15:55:41 crc kubenswrapper[4869]: I1001 15:55:41.958575 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/0458e249-c518-4c3a-83d7-dda2beb25763-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"0458e249-c518-4c3a-83d7-dda2beb25763\") " pod="openstack/ceilometer-0" Oct 01 15:55:41 crc kubenswrapper[4869]: I1001 15:55:41.958818 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0458e249-c518-4c3a-83d7-dda2beb25763-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"0458e249-c518-4c3a-83d7-dda2beb25763\") " pod="openstack/ceilometer-0" Oct 01 15:55:41 crc kubenswrapper[4869]: I1001 15:55:41.959963 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0458e249-c518-4c3a-83d7-dda2beb25763-scripts\") pod \"ceilometer-0\" (UID: \"0458e249-c518-4c3a-83d7-dda2beb25763\") " pod="openstack/ceilometer-0" Oct 01 15:55:41 crc kubenswrapper[4869]: I1001 15:55:41.962641 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0458e249-c518-4c3a-83d7-dda2beb25763-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"0458e249-c518-4c3a-83d7-dda2beb25763\") " pod="openstack/ceilometer-0" Oct 01 15:55:41 crc kubenswrapper[4869]: I1001 15:55:41.965563 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0458e249-c518-4c3a-83d7-dda2beb25763-config-data\") pod \"ceilometer-0\" (UID: \"0458e249-c518-4c3a-83d7-dda2beb25763\") " pod="openstack/ceilometer-0" Oct 01 15:55:41 crc kubenswrapper[4869]: I1001 15:55:41.971767 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5qwn8\" (UniqueName: 
\"kubernetes.io/projected/0458e249-c518-4c3a-83d7-dda2beb25763-kube-api-access-5qwn8\") pod \"ceilometer-0\" (UID: \"0458e249-c518-4c3a-83d7-dda2beb25763\") " pod="openstack/ceilometer-0" Oct 01 15:55:42 crc kubenswrapper[4869]: I1001 15:55:42.093396 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 01 15:55:42 crc kubenswrapper[4869]: I1001 15:55:42.568222 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 01 15:55:42 crc kubenswrapper[4869]: W1001 15:55:42.570291 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0458e249_c518_4c3a_83d7_dda2beb25763.slice/crio-8030726bb4bfb4309fe57cdcf0459bb2fc3e5090ae50efdee3c2b4fec0f33efd WatchSource:0}: Error finding container 8030726bb4bfb4309fe57cdcf0459bb2fc3e5090ae50efdee3c2b4fec0f33efd: Status 404 returned error can't find the container with id 8030726bb4bfb4309fe57cdcf0459bb2fc3e5090ae50efdee3c2b4fec0f33efd Oct 01 15:55:42 crc kubenswrapper[4869]: I1001 15:55:42.668539 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0458e249-c518-4c3a-83d7-dda2beb25763","Type":"ContainerStarted","Data":"8030726bb4bfb4309fe57cdcf0459bb2fc3e5090ae50efdee3c2b4fec0f33efd"} Oct 01 15:55:43 crc kubenswrapper[4869]: I1001 15:55:43.599791 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0a1f3c4c-d1e4-4602-9615-2587027c04d2" path="/var/lib/kubelet/pods/0a1f3c4c-d1e4-4602-9615-2587027c04d2/volumes" Oct 01 15:55:43 crc kubenswrapper[4869]: I1001 15:55:43.693715 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0458e249-c518-4c3a-83d7-dda2beb25763","Type":"ContainerStarted","Data":"55eae5b96491c71ebc2af0c5e9def254850bdb5bbec0b27e4525f304b7efab15"} Oct 01 15:55:44 crc kubenswrapper[4869]: I1001 15:55:44.708435 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0458e249-c518-4c3a-83d7-dda2beb25763","Type":"ContainerStarted","Data":"0673fbfc5b31435cb524864c0bd74b353eec31d391ddc5b2b1c5fa8948281a69"} Oct 01 15:55:45 crc kubenswrapper[4869]: I1001 15:55:45.581997 4869 scope.go:117] "RemoveContainer" containerID="ea87da209cc47118cc41bdb3107264d5dd2e07bc832e620553f0ac1be9456693" Oct 01 15:55:45 crc kubenswrapper[4869]: E1001 15:55:45.583371 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 15:55:45 crc kubenswrapper[4869]: I1001 15:55:45.657154 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/manila-share-share1-0" Oct 01 15:55:45 crc kubenswrapper[4869]: I1001 15:55:45.684414 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/manila-scheduler-0" Oct 01 15:55:45 crc kubenswrapper[4869]: I1001 15:55:45.719874 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0458e249-c518-4c3a-83d7-dda2beb25763","Type":"ContainerStarted","Data":"b976a1ac7675f4c2514ec05820c73af89727708255aea86df8bdb7945b1053dc"} Oct 01 15:55:45 crc kubenswrapper[4869]: 
I1001 15:55:45.734159 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-share-share1-0"] Oct 01 15:55:45 crc kubenswrapper[4869]: I1001 15:55:45.734685 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/manila-share-share1-0" podUID="d413715e-3ed1-4ea2-8b2f-0a56e1c2677e" containerName="manila-share" containerID="cri-o://f111438f80d0f5055529c6244880ac9ee8a04edffa8c2d0c1641a89a1b5e6d53" gracePeriod=30 Oct 01 15:55:45 crc kubenswrapper[4869]: I1001 15:55:45.735342 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/manila-share-share1-0" podUID="d413715e-3ed1-4ea2-8b2f-0a56e1c2677e" containerName="probe" containerID="cri-o://605a92c850e67049b917baa78be2cf05b4219bf2115ce553d543d6d7172ff3c3" gracePeriod=30 Oct 01 15:55:45 crc kubenswrapper[4869]: I1001 15:55:45.783903 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-scheduler-0"] Oct 01 15:55:45 crc kubenswrapper[4869]: I1001 15:55:45.784174 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/manila-scheduler-0" podUID="ad1db0ac-1359-46bf-b6d6-6ae089968105" containerName="manila-scheduler" containerID="cri-o://f18a2f9d660ec959412c7c816017b9531ca71df61884dd1ff143b7052b966d11" gracePeriod=30 Oct 01 15:55:45 crc kubenswrapper[4869]: I1001 15:55:45.785732 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/manila-scheduler-0" podUID="ad1db0ac-1359-46bf-b6d6-6ae089968105" containerName="probe" containerID="cri-o://39986cca38322ac80b9db03b4ed3e3038dcf7b530a41087e87f2870438450193" gracePeriod=30 Oct 01 15:55:46 crc kubenswrapper[4869]: I1001 15:55:46.740223 4869 generic.go:334] "Generic (PLEG): container finished" podID="ad1db0ac-1359-46bf-b6d6-6ae089968105" containerID="39986cca38322ac80b9db03b4ed3e3038dcf7b530a41087e87f2870438450193" exitCode=0 Oct 01 15:55:46 crc kubenswrapper[4869]: I1001 15:55:46.741059 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"ad1db0ac-1359-46bf-b6d6-6ae089968105","Type":"ContainerDied","Data":"39986cca38322ac80b9db03b4ed3e3038dcf7b530a41087e87f2870438450193"} Oct 01 15:55:46 crc kubenswrapper[4869]: I1001 15:55:46.747608 4869 generic.go:334] "Generic (PLEG): container finished" podID="d413715e-3ed1-4ea2-8b2f-0a56e1c2677e" containerID="605a92c850e67049b917baa78be2cf05b4219bf2115ce553d543d6d7172ff3c3" exitCode=0 Oct 01 15:55:46 crc kubenswrapper[4869]: I1001 15:55:46.747636 4869 generic.go:334] "Generic (PLEG): container finished" podID="d413715e-3ed1-4ea2-8b2f-0a56e1c2677e" containerID="f111438f80d0f5055529c6244880ac9ee8a04edffa8c2d0c1641a89a1b5e6d53" exitCode=1 Oct 01 15:55:46 crc kubenswrapper[4869]: I1001 15:55:46.747654 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"d413715e-3ed1-4ea2-8b2f-0a56e1c2677e","Type":"ContainerDied","Data":"605a92c850e67049b917baa78be2cf05b4219bf2115ce553d543d6d7172ff3c3"} Oct 01 15:55:46 crc kubenswrapper[4869]: I1001 15:55:46.747678 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"d413715e-3ed1-4ea2-8b2f-0a56e1c2677e","Type":"ContainerDied","Data":"f111438f80d0f5055529c6244880ac9ee8a04edffa8c2d0c1641a89a1b5e6d53"} Oct 01 15:55:46 crc kubenswrapper[4869]: I1001 15:55:46.747690 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" 
event={"ID":"d413715e-3ed1-4ea2-8b2f-0a56e1c2677e","Type":"ContainerDied","Data":"77f9257542da8ef5a245ce388d7052a4067e152c3ec30184f9195fe99665e1fe"} Oct 01 15:55:46 crc kubenswrapper[4869]: I1001 15:55:46.747699 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="77f9257542da8ef5a245ce388d7052a4067e152c3ec30184f9195fe99665e1fe" Oct 01 15:55:46 crc kubenswrapper[4869]: I1001 15:55:46.851473 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-share-share1-0" Oct 01 15:55:47 crc kubenswrapper[4869]: I1001 15:55:47.000001 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/d413715e-3ed1-4ea2-8b2f-0a56e1c2677e-var-lib-manila\") pod \"d413715e-3ed1-4ea2-8b2f-0a56e1c2677e\" (UID: \"d413715e-3ed1-4ea2-8b2f-0a56e1c2677e\") " Oct 01 15:55:47 crc kubenswrapper[4869]: I1001 15:55:47.000146 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d413715e-3ed1-4ea2-8b2f-0a56e1c2677e-var-lib-manila" (OuterVolumeSpecName: "var-lib-manila") pod "d413715e-3ed1-4ea2-8b2f-0a56e1c2677e" (UID: "d413715e-3ed1-4ea2-8b2f-0a56e1c2677e"). InnerVolumeSpecName "var-lib-manila". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 15:55:47 crc kubenswrapper[4869]: I1001 15:55:47.000280 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/d413715e-3ed1-4ea2-8b2f-0a56e1c2677e-ceph\") pod \"d413715e-3ed1-4ea2-8b2f-0a56e1c2677e\" (UID: \"d413715e-3ed1-4ea2-8b2f-0a56e1c2677e\") " Oct 01 15:55:47 crc kubenswrapper[4869]: I1001 15:55:47.000395 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d413715e-3ed1-4ea2-8b2f-0a56e1c2677e-scripts\") pod \"d413715e-3ed1-4ea2-8b2f-0a56e1c2677e\" (UID: \"d413715e-3ed1-4ea2-8b2f-0a56e1c2677e\") " Oct 01 15:55:47 crc kubenswrapper[4869]: I1001 15:55:47.000432 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/d413715e-3ed1-4ea2-8b2f-0a56e1c2677e-etc-machine-id\") pod \"d413715e-3ed1-4ea2-8b2f-0a56e1c2677e\" (UID: \"d413715e-3ed1-4ea2-8b2f-0a56e1c2677e\") " Oct 01 15:55:47 crc kubenswrapper[4869]: I1001 15:55:47.000597 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d413715e-3ed1-4ea2-8b2f-0a56e1c2677e-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "d413715e-3ed1-4ea2-8b2f-0a56e1c2677e" (UID: "d413715e-3ed1-4ea2-8b2f-0a56e1c2677e"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 15:55:47 crc kubenswrapper[4869]: I1001 15:55:47.000635 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d413715e-3ed1-4ea2-8b2f-0a56e1c2677e-config-data\") pod \"d413715e-3ed1-4ea2-8b2f-0a56e1c2677e\" (UID: \"d413715e-3ed1-4ea2-8b2f-0a56e1c2677e\") " Oct 01 15:55:47 crc kubenswrapper[4869]: I1001 15:55:47.000684 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d413715e-3ed1-4ea2-8b2f-0a56e1c2677e-config-data-custom\") pod \"d413715e-3ed1-4ea2-8b2f-0a56e1c2677e\" (UID: \"d413715e-3ed1-4ea2-8b2f-0a56e1c2677e\") " Oct 01 15:55:47 crc kubenswrapper[4869]: I1001 15:55:47.000709 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d413715e-3ed1-4ea2-8b2f-0a56e1c2677e-combined-ca-bundle\") pod \"d413715e-3ed1-4ea2-8b2f-0a56e1c2677e\" (UID: \"d413715e-3ed1-4ea2-8b2f-0a56e1c2677e\") " Oct 01 15:55:47 crc kubenswrapper[4869]: I1001 15:55:47.000741 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j64zd\" (UniqueName: \"kubernetes.io/projected/d413715e-3ed1-4ea2-8b2f-0a56e1c2677e-kube-api-access-j64zd\") pod \"d413715e-3ed1-4ea2-8b2f-0a56e1c2677e\" (UID: \"d413715e-3ed1-4ea2-8b2f-0a56e1c2677e\") " Oct 01 15:55:47 crc kubenswrapper[4869]: I1001 15:55:47.001556 4869 reconciler_common.go:293] "Volume detached for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/d413715e-3ed1-4ea2-8b2f-0a56e1c2677e-var-lib-manila\") on node \"crc\" DevicePath \"\"" Oct 01 15:55:47 crc kubenswrapper[4869]: I1001 15:55:47.001576 4869 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/d413715e-3ed1-4ea2-8b2f-0a56e1c2677e-etc-machine-id\") on node \"crc\" DevicePath \"\"" Oct 01 15:55:47 crc kubenswrapper[4869]: I1001 15:55:47.006370 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d413715e-3ed1-4ea2-8b2f-0a56e1c2677e-ceph" (OuterVolumeSpecName: "ceph") pod "d413715e-3ed1-4ea2-8b2f-0a56e1c2677e" (UID: "d413715e-3ed1-4ea2-8b2f-0a56e1c2677e"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:55:47 crc kubenswrapper[4869]: I1001 15:55:47.006537 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d413715e-3ed1-4ea2-8b2f-0a56e1c2677e-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "d413715e-3ed1-4ea2-8b2f-0a56e1c2677e" (UID: "d413715e-3ed1-4ea2-8b2f-0a56e1c2677e"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:55:47 crc kubenswrapper[4869]: I1001 15:55:47.006575 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d413715e-3ed1-4ea2-8b2f-0a56e1c2677e-scripts" (OuterVolumeSpecName: "scripts") pod "d413715e-3ed1-4ea2-8b2f-0a56e1c2677e" (UID: "d413715e-3ed1-4ea2-8b2f-0a56e1c2677e"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:55:47 crc kubenswrapper[4869]: I1001 15:55:47.007423 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d413715e-3ed1-4ea2-8b2f-0a56e1c2677e-kube-api-access-j64zd" (OuterVolumeSpecName: "kube-api-access-j64zd") pod "d413715e-3ed1-4ea2-8b2f-0a56e1c2677e" (UID: "d413715e-3ed1-4ea2-8b2f-0a56e1c2677e"). InnerVolumeSpecName "kube-api-access-j64zd". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:55:47 crc kubenswrapper[4869]: I1001 15:55:47.047690 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d413715e-3ed1-4ea2-8b2f-0a56e1c2677e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d413715e-3ed1-4ea2-8b2f-0a56e1c2677e" (UID: "d413715e-3ed1-4ea2-8b2f-0a56e1c2677e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:55:47 crc kubenswrapper[4869]: I1001 15:55:47.103225 4869 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d413715e-3ed1-4ea2-8b2f-0a56e1c2677e-config-data-custom\") on node \"crc\" DevicePath \"\"" Oct 01 15:55:47 crc kubenswrapper[4869]: I1001 15:55:47.103505 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d413715e-3ed1-4ea2-8b2f-0a56e1c2677e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 15:55:47 crc kubenswrapper[4869]: I1001 15:55:47.103578 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j64zd\" (UniqueName: \"kubernetes.io/projected/d413715e-3ed1-4ea2-8b2f-0a56e1c2677e-kube-api-access-j64zd\") on node \"crc\" DevicePath \"\"" Oct 01 15:55:47 crc kubenswrapper[4869]: I1001 15:55:47.103652 4869 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/d413715e-3ed1-4ea2-8b2f-0a56e1c2677e-ceph\") on node \"crc\" DevicePath \"\"" Oct 01 15:55:47 crc kubenswrapper[4869]: I1001 15:55:47.103712 4869 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d413715e-3ed1-4ea2-8b2f-0a56e1c2677e-scripts\") on node \"crc\" DevicePath \"\"" Oct 01 15:55:47 crc kubenswrapper[4869]: I1001 15:55:47.112608 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d413715e-3ed1-4ea2-8b2f-0a56e1c2677e-config-data" (OuterVolumeSpecName: "config-data") pod "d413715e-3ed1-4ea2-8b2f-0a56e1c2677e" (UID: "d413715e-3ed1-4ea2-8b2f-0a56e1c2677e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:55:47 crc kubenswrapper[4869]: I1001 15:55:47.205009 4869 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d413715e-3ed1-4ea2-8b2f-0a56e1c2677e-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 15:55:47 crc kubenswrapper[4869]: I1001 15:55:47.762014 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0458e249-c518-4c3a-83d7-dda2beb25763","Type":"ContainerStarted","Data":"6a74aa341eb4e59168c9ec536c2b7e29d3de8596dc7206415a27b5e19cfe65e3"} Oct 01 15:55:47 crc kubenswrapper[4869]: I1001 15:55:47.762230 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-share-share1-0" Oct 01 15:55:47 crc kubenswrapper[4869]: I1001 15:55:47.762343 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 01 15:55:47 crc kubenswrapper[4869]: I1001 15:55:47.808698 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.681844088 podStartE2EDuration="6.808677499s" podCreationTimestamp="2025-10-01 15:55:41 +0000 UTC" firstStartedPulling="2025-10-01 15:55:42.574669676 +0000 UTC m=+3051.721512822" lastFinishedPulling="2025-10-01 15:55:46.701503117 +0000 UTC m=+3055.848346233" observedRunningTime="2025-10-01 15:55:47.804478333 +0000 UTC m=+3056.951321479" watchObservedRunningTime="2025-10-01 15:55:47.808677499 +0000 UTC m=+3056.955520625" Oct 01 15:55:47 crc kubenswrapper[4869]: I1001 15:55:47.843613 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-share-share1-0"] Oct 01 15:55:47 crc kubenswrapper[4869]: I1001 15:55:47.855954 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/manila-share-share1-0"] Oct 01 15:55:47 crc kubenswrapper[4869]: I1001 15:55:47.864840 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-share-share1-0"] Oct 01 15:55:47 crc kubenswrapper[4869]: E1001 15:55:47.865311 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d413715e-3ed1-4ea2-8b2f-0a56e1c2677e" containerName="manila-share" Oct 01 15:55:47 crc kubenswrapper[4869]: I1001 15:55:47.865335 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="d413715e-3ed1-4ea2-8b2f-0a56e1c2677e" containerName="manila-share" Oct 01 15:55:47 crc kubenswrapper[4869]: E1001 15:55:47.865380 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d413715e-3ed1-4ea2-8b2f-0a56e1c2677e" containerName="probe" Oct 01 15:55:47 crc kubenswrapper[4869]: I1001 15:55:47.865390 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="d413715e-3ed1-4ea2-8b2f-0a56e1c2677e" containerName="probe" Oct 01 15:55:47 crc kubenswrapper[4869]: I1001 15:55:47.865609 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="d413715e-3ed1-4ea2-8b2f-0a56e1c2677e" containerName="probe" Oct 01 15:55:47 crc kubenswrapper[4869]: I1001 15:55:47.865642 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="d413715e-3ed1-4ea2-8b2f-0a56e1c2677e" containerName="manila-share" Oct 01 15:55:47 crc kubenswrapper[4869]: I1001 15:55:47.868499 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-share-share1-0" Oct 01 15:55:47 crc kubenswrapper[4869]: I1001 15:55:47.872043 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-share-share1-config-data" Oct 01 15:55:47 crc kubenswrapper[4869]: I1001 15:55:47.878715 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-share-share1-0"] Oct 01 15:55:48 crc kubenswrapper[4869]: I1001 15:55:48.041812 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d86b56fc-3eca-4fd9-9abb-d6831a9d12db-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"d86b56fc-3eca-4fd9-9abb-d6831a9d12db\") " pod="openstack/manila-share-share1-0" Oct 01 15:55:48 crc kubenswrapper[4869]: I1001 15:55:48.042360 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/d86b56fc-3eca-4fd9-9abb-d6831a9d12db-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"d86b56fc-3eca-4fd9-9abb-d6831a9d12db\") " pod="openstack/manila-share-share1-0" Oct 01 15:55:48 crc kubenswrapper[4869]: I1001 15:55:48.042422 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d86b56fc-3eca-4fd9-9abb-d6831a9d12db-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"d86b56fc-3eca-4fd9-9abb-d6831a9d12db\") " pod="openstack/manila-share-share1-0" Oct 01 15:55:48 crc kubenswrapper[4869]: I1001 15:55:48.042460 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d86b56fc-3eca-4fd9-9abb-d6831a9d12db-config-data\") pod \"manila-share-share1-0\" (UID: \"d86b56fc-3eca-4fd9-9abb-d6831a9d12db\") " pod="openstack/manila-share-share1-0" Oct 01 15:55:48 crc kubenswrapper[4869]: I1001 15:55:48.042484 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/d86b56fc-3eca-4fd9-9abb-d6831a9d12db-ceph\") pod \"manila-share-share1-0\" (UID: \"d86b56fc-3eca-4fd9-9abb-d6831a9d12db\") " pod="openstack/manila-share-share1-0" Oct 01 15:55:48 crc kubenswrapper[4869]: I1001 15:55:48.042557 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d86b56fc-3eca-4fd9-9abb-d6831a9d12db-scripts\") pod \"manila-share-share1-0\" (UID: \"d86b56fc-3eca-4fd9-9abb-d6831a9d12db\") " pod="openstack/manila-share-share1-0" Oct 01 15:55:48 crc kubenswrapper[4869]: I1001 15:55:48.042641 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-89m24\" (UniqueName: \"kubernetes.io/projected/d86b56fc-3eca-4fd9-9abb-d6831a9d12db-kube-api-access-89m24\") pod \"manila-share-share1-0\" (UID: \"d86b56fc-3eca-4fd9-9abb-d6831a9d12db\") " pod="openstack/manila-share-share1-0" Oct 01 15:55:48 crc kubenswrapper[4869]: I1001 15:55:48.042937 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/d86b56fc-3eca-4fd9-9abb-d6831a9d12db-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"d86b56fc-3eca-4fd9-9abb-d6831a9d12db\") " pod="openstack/manila-share-share1-0" Oct 01 15:55:48 crc 
kubenswrapper[4869]: I1001 15:55:48.145035 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/d86b56fc-3eca-4fd9-9abb-d6831a9d12db-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"d86b56fc-3eca-4fd9-9abb-d6831a9d12db\") " pod="openstack/manila-share-share1-0" Oct 01 15:55:48 crc kubenswrapper[4869]: I1001 15:55:48.145115 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d86b56fc-3eca-4fd9-9abb-d6831a9d12db-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"d86b56fc-3eca-4fd9-9abb-d6831a9d12db\") " pod="openstack/manila-share-share1-0" Oct 01 15:55:48 crc kubenswrapper[4869]: I1001 15:55:48.145175 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/d86b56fc-3eca-4fd9-9abb-d6831a9d12db-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"d86b56fc-3eca-4fd9-9abb-d6831a9d12db\") " pod="openstack/manila-share-share1-0" Oct 01 15:55:48 crc kubenswrapper[4869]: I1001 15:55:48.145208 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/d86b56fc-3eca-4fd9-9abb-d6831a9d12db-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"d86b56fc-3eca-4fd9-9abb-d6831a9d12db\") " pod="openstack/manila-share-share1-0" Oct 01 15:55:48 crc kubenswrapper[4869]: I1001 15:55:48.145588 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/d86b56fc-3eca-4fd9-9abb-d6831a9d12db-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"d86b56fc-3eca-4fd9-9abb-d6831a9d12db\") " pod="openstack/manila-share-share1-0" Oct 01 15:55:48 crc kubenswrapper[4869]: I1001 15:55:48.145219 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d86b56fc-3eca-4fd9-9abb-d6831a9d12db-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"d86b56fc-3eca-4fd9-9abb-d6831a9d12db\") " pod="openstack/manila-share-share1-0" Oct 01 15:55:48 crc kubenswrapper[4869]: I1001 15:55:48.145968 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d86b56fc-3eca-4fd9-9abb-d6831a9d12db-config-data\") pod \"manila-share-share1-0\" (UID: \"d86b56fc-3eca-4fd9-9abb-d6831a9d12db\") " pod="openstack/manila-share-share1-0" Oct 01 15:55:48 crc kubenswrapper[4869]: I1001 15:55:48.145997 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/d86b56fc-3eca-4fd9-9abb-d6831a9d12db-ceph\") pod \"manila-share-share1-0\" (UID: \"d86b56fc-3eca-4fd9-9abb-d6831a9d12db\") " pod="openstack/manila-share-share1-0" Oct 01 15:55:48 crc kubenswrapper[4869]: I1001 15:55:48.146037 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d86b56fc-3eca-4fd9-9abb-d6831a9d12db-scripts\") pod \"manila-share-share1-0\" (UID: \"d86b56fc-3eca-4fd9-9abb-d6831a9d12db\") " pod="openstack/manila-share-share1-0" Oct 01 15:55:48 crc kubenswrapper[4869]: I1001 15:55:48.146069 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-89m24\" (UniqueName: 
\"kubernetes.io/projected/d86b56fc-3eca-4fd9-9abb-d6831a9d12db-kube-api-access-89m24\") pod \"manila-share-share1-0\" (UID: \"d86b56fc-3eca-4fd9-9abb-d6831a9d12db\") " pod="openstack/manila-share-share1-0" Oct 01 15:55:48 crc kubenswrapper[4869]: I1001 15:55:48.156146 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d86b56fc-3eca-4fd9-9abb-d6831a9d12db-scripts\") pod \"manila-share-share1-0\" (UID: \"d86b56fc-3eca-4fd9-9abb-d6831a9d12db\") " pod="openstack/manila-share-share1-0" Oct 01 15:55:48 crc kubenswrapper[4869]: I1001 15:55:48.156587 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/d86b56fc-3eca-4fd9-9abb-d6831a9d12db-ceph\") pod \"manila-share-share1-0\" (UID: \"d86b56fc-3eca-4fd9-9abb-d6831a9d12db\") " pod="openstack/manila-share-share1-0" Oct 01 15:55:48 crc kubenswrapper[4869]: I1001 15:55:48.156608 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d86b56fc-3eca-4fd9-9abb-d6831a9d12db-config-data\") pod \"manila-share-share1-0\" (UID: \"d86b56fc-3eca-4fd9-9abb-d6831a9d12db\") " pod="openstack/manila-share-share1-0" Oct 01 15:55:48 crc kubenswrapper[4869]: I1001 15:55:48.156788 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d86b56fc-3eca-4fd9-9abb-d6831a9d12db-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"d86b56fc-3eca-4fd9-9abb-d6831a9d12db\") " pod="openstack/manila-share-share1-0" Oct 01 15:55:48 crc kubenswrapper[4869]: I1001 15:55:48.160483 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d86b56fc-3eca-4fd9-9abb-d6831a9d12db-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"d86b56fc-3eca-4fd9-9abb-d6831a9d12db\") " pod="openstack/manila-share-share1-0" Oct 01 15:55:48 crc kubenswrapper[4869]: I1001 15:55:48.165361 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-89m24\" (UniqueName: \"kubernetes.io/projected/d86b56fc-3eca-4fd9-9abb-d6831a9d12db-kube-api-access-89m24\") pod \"manila-share-share1-0\" (UID: \"d86b56fc-3eca-4fd9-9abb-d6831a9d12db\") " pod="openstack/manila-share-share1-0" Oct 01 15:55:48 crc kubenswrapper[4869]: I1001 15:55:48.273089 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-share-share1-0" Oct 01 15:55:48 crc kubenswrapper[4869]: I1001 15:55:48.309150 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-scheduler-0" Oct 01 15:55:48 crc kubenswrapper[4869]: I1001 15:55:48.452645 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ad1db0ac-1359-46bf-b6d6-6ae089968105-config-data-custom\") pod \"ad1db0ac-1359-46bf-b6d6-6ae089968105\" (UID: \"ad1db0ac-1359-46bf-b6d6-6ae089968105\") " Oct 01 15:55:48 crc kubenswrapper[4869]: I1001 15:55:48.453195 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kk9hz\" (UniqueName: \"kubernetes.io/projected/ad1db0ac-1359-46bf-b6d6-6ae089968105-kube-api-access-kk9hz\") pod \"ad1db0ac-1359-46bf-b6d6-6ae089968105\" (UID: \"ad1db0ac-1359-46bf-b6d6-6ae089968105\") " Oct 01 15:55:48 crc kubenswrapper[4869]: I1001 15:55:48.453268 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ad1db0ac-1359-46bf-b6d6-6ae089968105-scripts\") pod \"ad1db0ac-1359-46bf-b6d6-6ae089968105\" (UID: \"ad1db0ac-1359-46bf-b6d6-6ae089968105\") " Oct 01 15:55:48 crc kubenswrapper[4869]: I1001 15:55:48.453349 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad1db0ac-1359-46bf-b6d6-6ae089968105-combined-ca-bundle\") pod \"ad1db0ac-1359-46bf-b6d6-6ae089968105\" (UID: \"ad1db0ac-1359-46bf-b6d6-6ae089968105\") " Oct 01 15:55:48 crc kubenswrapper[4869]: I1001 15:55:48.453481 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ad1db0ac-1359-46bf-b6d6-6ae089968105-config-data\") pod \"ad1db0ac-1359-46bf-b6d6-6ae089968105\" (UID: \"ad1db0ac-1359-46bf-b6d6-6ae089968105\") " Oct 01 15:55:48 crc kubenswrapper[4869]: I1001 15:55:48.453576 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ad1db0ac-1359-46bf-b6d6-6ae089968105-etc-machine-id\") pod \"ad1db0ac-1359-46bf-b6d6-6ae089968105\" (UID: \"ad1db0ac-1359-46bf-b6d6-6ae089968105\") " Oct 01 15:55:48 crc kubenswrapper[4869]: I1001 15:55:48.453949 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ad1db0ac-1359-46bf-b6d6-6ae089968105-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "ad1db0ac-1359-46bf-b6d6-6ae089968105" (UID: "ad1db0ac-1359-46bf-b6d6-6ae089968105"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 15:55:48 crc kubenswrapper[4869]: I1001 15:55:48.454479 4869 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ad1db0ac-1359-46bf-b6d6-6ae089968105-etc-machine-id\") on node \"crc\" DevicePath \"\"" Oct 01 15:55:48 crc kubenswrapper[4869]: I1001 15:55:48.474214 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ad1db0ac-1359-46bf-b6d6-6ae089968105-kube-api-access-kk9hz" (OuterVolumeSpecName: "kube-api-access-kk9hz") pod "ad1db0ac-1359-46bf-b6d6-6ae089968105" (UID: "ad1db0ac-1359-46bf-b6d6-6ae089968105"). InnerVolumeSpecName "kube-api-access-kk9hz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:55:48 crc kubenswrapper[4869]: I1001 15:55:48.478582 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ad1db0ac-1359-46bf-b6d6-6ae089968105-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "ad1db0ac-1359-46bf-b6d6-6ae089968105" (UID: "ad1db0ac-1359-46bf-b6d6-6ae089968105"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:55:48 crc kubenswrapper[4869]: I1001 15:55:48.490495 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ad1db0ac-1359-46bf-b6d6-6ae089968105-scripts" (OuterVolumeSpecName: "scripts") pod "ad1db0ac-1359-46bf-b6d6-6ae089968105" (UID: "ad1db0ac-1359-46bf-b6d6-6ae089968105"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:55:48 crc kubenswrapper[4869]: I1001 15:55:48.523411 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ad1db0ac-1359-46bf-b6d6-6ae089968105-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ad1db0ac-1359-46bf-b6d6-6ae089968105" (UID: "ad1db0ac-1359-46bf-b6d6-6ae089968105"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:55:48 crc kubenswrapper[4869]: I1001 15:55:48.556393 4869 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ad1db0ac-1359-46bf-b6d6-6ae089968105-config-data-custom\") on node \"crc\" DevicePath \"\"" Oct 01 15:55:48 crc kubenswrapper[4869]: I1001 15:55:48.556430 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kk9hz\" (UniqueName: \"kubernetes.io/projected/ad1db0ac-1359-46bf-b6d6-6ae089968105-kube-api-access-kk9hz\") on node \"crc\" DevicePath \"\"" Oct 01 15:55:48 crc kubenswrapper[4869]: I1001 15:55:48.556443 4869 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ad1db0ac-1359-46bf-b6d6-6ae089968105-scripts\") on node \"crc\" DevicePath \"\"" Oct 01 15:55:48 crc kubenswrapper[4869]: I1001 15:55:48.556450 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad1db0ac-1359-46bf-b6d6-6ae089968105-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 15:55:48 crc kubenswrapper[4869]: I1001 15:55:48.569631 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ad1db0ac-1359-46bf-b6d6-6ae089968105-config-data" (OuterVolumeSpecName: "config-data") pod "ad1db0ac-1359-46bf-b6d6-6ae089968105" (UID: "ad1db0ac-1359-46bf-b6d6-6ae089968105"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 15:55:48 crc kubenswrapper[4869]: I1001 15:55:48.658300 4869 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ad1db0ac-1359-46bf-b6d6-6ae089968105-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 15:55:48 crc kubenswrapper[4869]: I1001 15:55:48.773845 4869 generic.go:334] "Generic (PLEG): container finished" podID="ad1db0ac-1359-46bf-b6d6-6ae089968105" containerID="f18a2f9d660ec959412c7c816017b9531ca71df61884dd1ff143b7052b966d11" exitCode=0 Oct 01 15:55:48 crc kubenswrapper[4869]: I1001 15:55:48.773919 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"ad1db0ac-1359-46bf-b6d6-6ae089968105","Type":"ContainerDied","Data":"f18a2f9d660ec959412c7c816017b9531ca71df61884dd1ff143b7052b966d11"} Oct 01 15:55:48 crc kubenswrapper[4869]: I1001 15:55:48.773952 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"ad1db0ac-1359-46bf-b6d6-6ae089968105","Type":"ContainerDied","Data":"f3dad3a91977a40d1c6447eaa9c769e9cce3747dd6079e062fbbd0c6e8d50521"} Oct 01 15:55:48 crc kubenswrapper[4869]: I1001 15:55:48.773950 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-scheduler-0" Oct 01 15:55:48 crc kubenswrapper[4869]: I1001 15:55:48.773968 4869 scope.go:117] "RemoveContainer" containerID="39986cca38322ac80b9db03b4ed3e3038dcf7b530a41087e87f2870438450193" Oct 01 15:55:48 crc kubenswrapper[4869]: I1001 15:55:48.810333 4869 scope.go:117] "RemoveContainer" containerID="f18a2f9d660ec959412c7c816017b9531ca71df61884dd1ff143b7052b966d11" Oct 01 15:55:48 crc kubenswrapper[4869]: I1001 15:55:48.823234 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-scheduler-0"] Oct 01 15:55:48 crc kubenswrapper[4869]: I1001 15:55:48.840116 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/manila-scheduler-0"] Oct 01 15:55:48 crc kubenswrapper[4869]: I1001 15:55:48.842951 4869 scope.go:117] "RemoveContainer" containerID="39986cca38322ac80b9db03b4ed3e3038dcf7b530a41087e87f2870438450193" Oct 01 15:55:48 crc kubenswrapper[4869]: E1001 15:55:48.843632 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"39986cca38322ac80b9db03b4ed3e3038dcf7b530a41087e87f2870438450193\": container with ID starting with 39986cca38322ac80b9db03b4ed3e3038dcf7b530a41087e87f2870438450193 not found: ID does not exist" containerID="39986cca38322ac80b9db03b4ed3e3038dcf7b530a41087e87f2870438450193" Oct 01 15:55:48 crc kubenswrapper[4869]: I1001 15:55:48.843663 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"39986cca38322ac80b9db03b4ed3e3038dcf7b530a41087e87f2870438450193"} err="failed to get container status \"39986cca38322ac80b9db03b4ed3e3038dcf7b530a41087e87f2870438450193\": rpc error: code = NotFound desc = could not find container \"39986cca38322ac80b9db03b4ed3e3038dcf7b530a41087e87f2870438450193\": container with ID starting with 39986cca38322ac80b9db03b4ed3e3038dcf7b530a41087e87f2870438450193 not found: ID does not exist" Oct 01 15:55:48 crc kubenswrapper[4869]: I1001 15:55:48.843687 4869 scope.go:117] "RemoveContainer" containerID="f18a2f9d660ec959412c7c816017b9531ca71df61884dd1ff143b7052b966d11" Oct 01 15:55:48 crc kubenswrapper[4869]: E1001 15:55:48.843982 4869 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"f18a2f9d660ec959412c7c816017b9531ca71df61884dd1ff143b7052b966d11\": container with ID starting with f18a2f9d660ec959412c7c816017b9531ca71df61884dd1ff143b7052b966d11 not found: ID does not exist" containerID="f18a2f9d660ec959412c7c816017b9531ca71df61884dd1ff143b7052b966d11" Oct 01 15:55:48 crc kubenswrapper[4869]: I1001 15:55:48.844018 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f18a2f9d660ec959412c7c816017b9531ca71df61884dd1ff143b7052b966d11"} err="failed to get container status \"f18a2f9d660ec959412c7c816017b9531ca71df61884dd1ff143b7052b966d11\": rpc error: code = NotFound desc = could not find container \"f18a2f9d660ec959412c7c816017b9531ca71df61884dd1ff143b7052b966d11\": container with ID starting with f18a2f9d660ec959412c7c816017b9531ca71df61884dd1ff143b7052b966d11 not found: ID does not exist" Oct 01 15:55:48 crc kubenswrapper[4869]: I1001 15:55:48.862121 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-scheduler-0"] Oct 01 15:55:48 crc kubenswrapper[4869]: E1001 15:55:48.862769 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad1db0ac-1359-46bf-b6d6-6ae089968105" containerName="manila-scheduler" Oct 01 15:55:48 crc kubenswrapper[4869]: I1001 15:55:48.862788 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad1db0ac-1359-46bf-b6d6-6ae089968105" containerName="manila-scheduler" Oct 01 15:55:48 crc kubenswrapper[4869]: E1001 15:55:48.862830 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad1db0ac-1359-46bf-b6d6-6ae089968105" containerName="probe" Oct 01 15:55:48 crc kubenswrapper[4869]: I1001 15:55:48.862836 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad1db0ac-1359-46bf-b6d6-6ae089968105" containerName="probe" Oct 01 15:55:48 crc kubenswrapper[4869]: I1001 15:55:48.863010 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="ad1db0ac-1359-46bf-b6d6-6ae089968105" containerName="probe" Oct 01 15:55:48 crc kubenswrapper[4869]: I1001 15:55:48.863028 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="ad1db0ac-1359-46bf-b6d6-6ae089968105" containerName="manila-scheduler" Oct 01 15:55:48 crc kubenswrapper[4869]: I1001 15:55:48.864932 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-scheduler-0" Oct 01 15:55:48 crc kubenswrapper[4869]: I1001 15:55:48.867090 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-scheduler-config-data" Oct 01 15:55:48 crc kubenswrapper[4869]: I1001 15:55:48.895407 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-scheduler-0"] Oct 01 15:55:48 crc kubenswrapper[4869]: I1001 15:55:48.909435 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-share-share1-0"] Oct 01 15:55:48 crc kubenswrapper[4869]: I1001 15:55:48.965074 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ae87b093-3a64-4d75-9b85-45fed188f715-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"ae87b093-3a64-4d75-9b85-45fed188f715\") " pod="openstack/manila-scheduler-0" Oct 01 15:55:48 crc kubenswrapper[4869]: I1001 15:55:48.965181 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ae87b093-3a64-4d75-9b85-45fed188f715-scripts\") pod \"manila-scheduler-0\" (UID: \"ae87b093-3a64-4d75-9b85-45fed188f715\") " pod="openstack/manila-scheduler-0" Oct 01 15:55:48 crc kubenswrapper[4869]: I1001 15:55:48.965217 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae87b093-3a64-4d75-9b85-45fed188f715-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"ae87b093-3a64-4d75-9b85-45fed188f715\") " pod="openstack/manila-scheduler-0" Oct 01 15:55:48 crc kubenswrapper[4869]: I1001 15:55:48.965281 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ae87b093-3a64-4d75-9b85-45fed188f715-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"ae87b093-3a64-4d75-9b85-45fed188f715\") " pod="openstack/manila-scheduler-0" Oct 01 15:55:48 crc kubenswrapper[4869]: I1001 15:55:48.965304 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ae87b093-3a64-4d75-9b85-45fed188f715-config-data\") pod \"manila-scheduler-0\" (UID: \"ae87b093-3a64-4d75-9b85-45fed188f715\") " pod="openstack/manila-scheduler-0" Oct 01 15:55:48 crc kubenswrapper[4869]: I1001 15:55:48.965368 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v8v8z\" (UniqueName: \"kubernetes.io/projected/ae87b093-3a64-4d75-9b85-45fed188f715-kube-api-access-v8v8z\") pod \"manila-scheduler-0\" (UID: \"ae87b093-3a64-4d75-9b85-45fed188f715\") " pod="openstack/manila-scheduler-0" Oct 01 15:55:49 crc kubenswrapper[4869]: I1001 15:55:49.067662 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v8v8z\" (UniqueName: \"kubernetes.io/projected/ae87b093-3a64-4d75-9b85-45fed188f715-kube-api-access-v8v8z\") pod \"manila-scheduler-0\" (UID: \"ae87b093-3a64-4d75-9b85-45fed188f715\") " pod="openstack/manila-scheduler-0" Oct 01 15:55:49 crc kubenswrapper[4869]: I1001 15:55:49.067791 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ae87b093-3a64-4d75-9b85-45fed188f715-config-data-custom\") pod \"manila-scheduler-0\" (UID: 
\"ae87b093-3a64-4d75-9b85-45fed188f715\") " pod="openstack/manila-scheduler-0" Oct 01 15:55:49 crc kubenswrapper[4869]: I1001 15:55:49.067868 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ae87b093-3a64-4d75-9b85-45fed188f715-scripts\") pod \"manila-scheduler-0\" (UID: \"ae87b093-3a64-4d75-9b85-45fed188f715\") " pod="openstack/manila-scheduler-0" Oct 01 15:55:49 crc kubenswrapper[4869]: I1001 15:55:49.067895 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae87b093-3a64-4d75-9b85-45fed188f715-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"ae87b093-3a64-4d75-9b85-45fed188f715\") " pod="openstack/manila-scheduler-0" Oct 01 15:55:49 crc kubenswrapper[4869]: I1001 15:55:49.067942 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ae87b093-3a64-4d75-9b85-45fed188f715-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"ae87b093-3a64-4d75-9b85-45fed188f715\") " pod="openstack/manila-scheduler-0" Oct 01 15:55:49 crc kubenswrapper[4869]: I1001 15:55:49.067966 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ae87b093-3a64-4d75-9b85-45fed188f715-config-data\") pod \"manila-scheduler-0\" (UID: \"ae87b093-3a64-4d75-9b85-45fed188f715\") " pod="openstack/manila-scheduler-0" Oct 01 15:55:49 crc kubenswrapper[4869]: I1001 15:55:49.069610 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ae87b093-3a64-4d75-9b85-45fed188f715-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"ae87b093-3a64-4d75-9b85-45fed188f715\") " pod="openstack/manila-scheduler-0" Oct 01 15:55:49 crc kubenswrapper[4869]: I1001 15:55:49.072469 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ae87b093-3a64-4d75-9b85-45fed188f715-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"ae87b093-3a64-4d75-9b85-45fed188f715\") " pod="openstack/manila-scheduler-0" Oct 01 15:55:49 crc kubenswrapper[4869]: I1001 15:55:49.074492 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ae87b093-3a64-4d75-9b85-45fed188f715-scripts\") pod \"manila-scheduler-0\" (UID: \"ae87b093-3a64-4d75-9b85-45fed188f715\") " pod="openstack/manila-scheduler-0" Oct 01 15:55:49 crc kubenswrapper[4869]: I1001 15:55:49.074787 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae87b093-3a64-4d75-9b85-45fed188f715-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"ae87b093-3a64-4d75-9b85-45fed188f715\") " pod="openstack/manila-scheduler-0" Oct 01 15:55:49 crc kubenswrapper[4869]: I1001 15:55:49.075777 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ae87b093-3a64-4d75-9b85-45fed188f715-config-data\") pod \"manila-scheduler-0\" (UID: \"ae87b093-3a64-4d75-9b85-45fed188f715\") " pod="openstack/manila-scheduler-0" Oct 01 15:55:49 crc kubenswrapper[4869]: I1001 15:55:49.089116 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v8v8z\" (UniqueName: 
\"kubernetes.io/projected/ae87b093-3a64-4d75-9b85-45fed188f715-kube-api-access-v8v8z\") pod \"manila-scheduler-0\" (UID: \"ae87b093-3a64-4d75-9b85-45fed188f715\") " pod="openstack/manila-scheduler-0" Oct 01 15:55:49 crc kubenswrapper[4869]: I1001 15:55:49.196773 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-scheduler-0" Oct 01 15:55:49 crc kubenswrapper[4869]: I1001 15:55:49.505719 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-scheduler-0"] Oct 01 15:55:49 crc kubenswrapper[4869]: W1001 15:55:49.551054 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podae87b093_3a64_4d75_9b85_45fed188f715.slice/crio-556f4f9785d537ffef404744b6015f4b47a582a83ab37a549303fc81a3f48870 WatchSource:0}: Error finding container 556f4f9785d537ffef404744b6015f4b47a582a83ab37a549303fc81a3f48870: Status 404 returned error can't find the container with id 556f4f9785d537ffef404744b6015f4b47a582a83ab37a549303fc81a3f48870 Oct 01 15:55:49 crc kubenswrapper[4869]: I1001 15:55:49.592850 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ad1db0ac-1359-46bf-b6d6-6ae089968105" path="/var/lib/kubelet/pods/ad1db0ac-1359-46bf-b6d6-6ae089968105/volumes" Oct 01 15:55:49 crc kubenswrapper[4869]: I1001 15:55:49.593930 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d413715e-3ed1-4ea2-8b2f-0a56e1c2677e" path="/var/lib/kubelet/pods/d413715e-3ed1-4ea2-8b2f-0a56e1c2677e/volumes" Oct 01 15:55:49 crc kubenswrapper[4869]: I1001 15:55:49.791230 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"d86b56fc-3eca-4fd9-9abb-d6831a9d12db","Type":"ContainerStarted","Data":"860504b275ecc47032a8e9e6908dbbc270e067255ffc4723f1a51861e43264f4"} Oct 01 15:55:49 crc kubenswrapper[4869]: I1001 15:55:49.791286 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"d86b56fc-3eca-4fd9-9abb-d6831a9d12db","Type":"ContainerStarted","Data":"3b74a093b86b0af0604d3b3347bd00590bc56a14fc0981815fa870cbc60dc7d8"} Oct 01 15:55:49 crc kubenswrapper[4869]: I1001 15:55:49.795548 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"ae87b093-3a64-4d75-9b85-45fed188f715","Type":"ContainerStarted","Data":"556f4f9785d537ffef404744b6015f4b47a582a83ab37a549303fc81a3f48870"} Oct 01 15:55:50 crc kubenswrapper[4869]: I1001 15:55:50.807684 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"d86b56fc-3eca-4fd9-9abb-d6831a9d12db","Type":"ContainerStarted","Data":"82b1e943149fbd32fa775f739bba9f1786965d2e71d13794ffe47c45ca3437a1"} Oct 01 15:55:50 crc kubenswrapper[4869]: I1001 15:55:50.811739 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"ae87b093-3a64-4d75-9b85-45fed188f715","Type":"ContainerStarted","Data":"a7a21c6545f680567d3105ff3967c6fa3a06a897129ac83ef873e67760551fe4"} Oct 01 15:55:50 crc kubenswrapper[4869]: I1001 15:55:50.811788 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"ae87b093-3a64-4d75-9b85-45fed188f715","Type":"ContainerStarted","Data":"3f3919b67d36a38524c56fa1a0bc55732c2527a68eb0dc12a4f8c0697473a4cb"} Oct 01 15:55:50 crc kubenswrapper[4869]: I1001 15:55:50.843627 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack/manila-share-share1-0" podStartSLOduration=3.843605343 podStartE2EDuration="3.843605343s" podCreationTimestamp="2025-10-01 15:55:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:55:50.834995415 +0000 UTC m=+3059.981838531" watchObservedRunningTime="2025-10-01 15:55:50.843605343 +0000 UTC m=+3059.990448479" Oct 01 15:55:50 crc kubenswrapper[4869]: I1001 15:55:50.867698 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-scheduler-0" podStartSLOduration=2.86767473 podStartE2EDuration="2.86767473s" podCreationTimestamp="2025-10-01 15:55:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:55:50.864164801 +0000 UTC m=+3060.011007937" watchObservedRunningTime="2025-10-01 15:55:50.86767473 +0000 UTC m=+3060.014517876" Oct 01 15:55:54 crc kubenswrapper[4869]: I1001 15:55:54.269472 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/manila-api-0" Oct 01 15:55:56 crc kubenswrapper[4869]: I1001 15:55:56.581394 4869 scope.go:117] "RemoveContainer" containerID="ea87da209cc47118cc41bdb3107264d5dd2e07bc832e620553f0ac1be9456693" Oct 01 15:55:56 crc kubenswrapper[4869]: E1001 15:55:56.582423 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 15:55:58 crc kubenswrapper[4869]: I1001 15:55:58.274374 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/manila-share-share1-0" Oct 01 15:55:59 crc kubenswrapper[4869]: I1001 15:55:59.219145 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/manila-scheduler-0" Oct 01 15:56:00 crc kubenswrapper[4869]: I1001 15:56:00.671143 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/manila-scheduler-0" Oct 01 15:56:09 crc kubenswrapper[4869]: I1001 15:56:09.743124 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/manila-share-share1-0" Oct 01 15:56:10 crc kubenswrapper[4869]: I1001 15:56:10.584557 4869 scope.go:117] "RemoveContainer" containerID="ea87da209cc47118cc41bdb3107264d5dd2e07bc832e620553f0ac1be9456693" Oct 01 15:56:10 crc kubenswrapper[4869]: E1001 15:56:10.585859 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 15:56:12 crc kubenswrapper[4869]: I1001 15:56:12.102798 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Oct 01 15:56:15 crc kubenswrapper[4869]: I1001 15:56:15.389669 4869 scope.go:117] "RemoveContainer" containerID="3d863a20679bb24706bca70c967efd7d0bb10fe19b461751fa5815624c992250" Oct 01 15:56:15 
crc kubenswrapper[4869]: I1001 15:56:15.442218 4869 scope.go:117] "RemoveContainer" containerID="dcff541c96dba12ba0ead29d3b75b548f45575094f1f4f61f28448018972ca7d" Oct 01 15:56:15 crc kubenswrapper[4869]: I1001 15:56:15.510676 4869 scope.go:117] "RemoveContainer" containerID="b3e5b7f4fd336faf4e8c3ab4170e3a30cc70fde295fc51d405de6d9ea8a3a15d" Oct 01 15:56:25 crc kubenswrapper[4869]: I1001 15:56:25.581720 4869 scope.go:117] "RemoveContainer" containerID="ea87da209cc47118cc41bdb3107264d5dd2e07bc832e620553f0ac1be9456693" Oct 01 15:56:25 crc kubenswrapper[4869]: E1001 15:56:25.582839 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 15:56:40 crc kubenswrapper[4869]: I1001 15:56:40.583820 4869 scope.go:117] "RemoveContainer" containerID="ea87da209cc47118cc41bdb3107264d5dd2e07bc832e620553f0ac1be9456693" Oct 01 15:56:40 crc kubenswrapper[4869]: E1001 15:56:40.584734 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 15:56:54 crc kubenswrapper[4869]: I1001 15:56:54.581602 4869 scope.go:117] "RemoveContainer" containerID="ea87da209cc47118cc41bdb3107264d5dd2e07bc832e620553f0ac1be9456693" Oct 01 15:56:55 crc kubenswrapper[4869]: I1001 15:56:55.555810 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" event={"ID":"a4b64f7f-0b03-4f47-965b-9fde048b735c","Type":"ContainerStarted","Data":"73a9cb1c6d1e9e84d375990bcfc5854c30e29631055280b1b2dc0f98545eaf97"} Oct 01 15:57:11 crc kubenswrapper[4869]: I1001 15:57:11.444021 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-operator-6b47f79668-mq25c"] Oct 01 15:57:11 crc kubenswrapper[4869]: I1001 15:57:11.446964 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-6b47f79668-mq25c" Oct 01 15:57:11 crc kubenswrapper[4869]: I1001 15:57:11.469774 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lkgd4\" (UniqueName: \"kubernetes.io/projected/4fdc8588-1f96-4122-ba09-bda7cc861582-kube-api-access-lkgd4\") pod \"openstack-operator-controller-operator-6b47f79668-mq25c\" (UID: \"4fdc8588-1f96-4122-ba09-bda7cc861582\") " pod="openstack-operators/openstack-operator-controller-operator-6b47f79668-mq25c" Oct 01 15:57:11 crc kubenswrapper[4869]: I1001 15:57:11.469950 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-6b47f79668-mq25c"] Oct 01 15:57:11 crc kubenswrapper[4869]: I1001 15:57:11.571704 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lkgd4\" (UniqueName: \"kubernetes.io/projected/4fdc8588-1f96-4122-ba09-bda7cc861582-kube-api-access-lkgd4\") pod \"openstack-operator-controller-operator-6b47f79668-mq25c\" (UID: \"4fdc8588-1f96-4122-ba09-bda7cc861582\") " pod="openstack-operators/openstack-operator-controller-operator-6b47f79668-mq25c" Oct 01 15:57:11 crc kubenswrapper[4869]: I1001 15:57:11.591769 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lkgd4\" (UniqueName: \"kubernetes.io/projected/4fdc8588-1f96-4122-ba09-bda7cc861582-kube-api-access-lkgd4\") pod \"openstack-operator-controller-operator-6b47f79668-mq25c\" (UID: \"4fdc8588-1f96-4122-ba09-bda7cc861582\") " pod="openstack-operators/openstack-operator-controller-operator-6b47f79668-mq25c" Oct 01 15:57:11 crc kubenswrapper[4869]: I1001 15:57:11.774645 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-6b47f79668-mq25c" Oct 01 15:57:12 crc kubenswrapper[4869]: I1001 15:57:12.403094 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-6b47f79668-mq25c"] Oct 01 15:57:12 crc kubenswrapper[4869]: I1001 15:57:12.778532 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-6b47f79668-mq25c" event={"ID":"4fdc8588-1f96-4122-ba09-bda7cc861582","Type":"ContainerStarted","Data":"9c2cd392bbc111ff1549b9973cd3af917049965023e8ed4adfcca46d9152f3c8"} Oct 01 15:57:12 crc kubenswrapper[4869]: I1001 15:57:12.778919 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-6b47f79668-mq25c" event={"ID":"4fdc8588-1f96-4122-ba09-bda7cc861582","Type":"ContainerStarted","Data":"1cc25bc8e8b40c90c91f665f9de5a00092c46eb26133269f3ea881748d588226"} Oct 01 15:57:13 crc kubenswrapper[4869]: I1001 15:57:13.791220 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-6b47f79668-mq25c" event={"ID":"4fdc8588-1f96-4122-ba09-bda7cc861582","Type":"ContainerStarted","Data":"8fd5ae35b720d019129ce7f331445dc684b19cab57713b3613c16c40b9c54b1c"} Oct 01 15:57:13 crc kubenswrapper[4869]: I1001 15:57:13.791800 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-operator-6b47f79668-mq25c" Oct 01 15:57:13 crc kubenswrapper[4869]: I1001 15:57:13.858678 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-operator-6b47f79668-mq25c" podStartSLOduration=2.858651804 podStartE2EDuration="2.858651804s" podCreationTimestamp="2025-10-01 15:57:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 15:57:13.840629489 +0000 UTC m=+3142.987472635" watchObservedRunningTime="2025-10-01 15:57:13.858651804 +0000 UTC m=+3143.005494950" Oct 01 15:57:21 crc kubenswrapper[4869]: I1001 15:57:21.778803 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-operator-6b47f79668-mq25c" Oct 01 15:57:21 crc kubenswrapper[4869]: I1001 15:57:21.883416 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-676c66f88b-8r9wr"] Oct 01 15:57:21 crc kubenswrapper[4869]: I1001 15:57:21.883656 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/openstack-operator-controller-operator-676c66f88b-8r9wr" podUID="c0f1c013-9664-4846-8576-8a9a26c64dad" containerName="operator" containerID="cri-o://a5d46c0d49426f951873a559d933817635be5f5aab8c5e38d1894f6e0f69a661" gracePeriod=10 Oct 01 15:57:21 crc kubenswrapper[4869]: I1001 15:57:21.883797 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/openstack-operator-controller-operator-676c66f88b-8r9wr" podUID="c0f1c013-9664-4846-8576-8a9a26c64dad" containerName="kube-rbac-proxy" containerID="cri-o://08443d0df1fb1c49c0a76691ad8df71c97795e8a18eb345aef12035fac6fb39f" gracePeriod=10 Oct 01 15:57:22 crc kubenswrapper[4869]: I1001 15:57:22.380161 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-676c66f88b-8r9wr" Oct 01 15:57:22 crc kubenswrapper[4869]: I1001 15:57:22.533175 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcxw4\" (UniqueName: \"kubernetes.io/projected/c0f1c013-9664-4846-8576-8a9a26c64dad-kube-api-access-fcxw4\") pod \"c0f1c013-9664-4846-8576-8a9a26c64dad\" (UID: \"c0f1c013-9664-4846-8576-8a9a26c64dad\") " Oct 01 15:57:22 crc kubenswrapper[4869]: I1001 15:57:22.539542 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c0f1c013-9664-4846-8576-8a9a26c64dad-kube-api-access-fcxw4" (OuterVolumeSpecName: "kube-api-access-fcxw4") pod "c0f1c013-9664-4846-8576-8a9a26c64dad" (UID: "c0f1c013-9664-4846-8576-8a9a26c64dad"). InnerVolumeSpecName "kube-api-access-fcxw4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:57:22 crc kubenswrapper[4869]: I1001 15:57:22.635669 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcxw4\" (UniqueName: \"kubernetes.io/projected/c0f1c013-9664-4846-8576-8a9a26c64dad-kube-api-access-fcxw4\") on node \"crc\" DevicePath \"\"" Oct 01 15:57:22 crc kubenswrapper[4869]: I1001 15:57:22.900611 4869 generic.go:334] "Generic (PLEG): container finished" podID="c0f1c013-9664-4846-8576-8a9a26c64dad" containerID="08443d0df1fb1c49c0a76691ad8df71c97795e8a18eb345aef12035fac6fb39f" exitCode=0 Oct 01 15:57:22 crc kubenswrapper[4869]: I1001 15:57:22.900664 4869 generic.go:334] "Generic (PLEG): container finished" podID="c0f1c013-9664-4846-8576-8a9a26c64dad" containerID="a5d46c0d49426f951873a559d933817635be5f5aab8c5e38d1894f6e0f69a661" exitCode=0 Oct 01 15:57:22 crc kubenswrapper[4869]: I1001 15:57:22.900669 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-676c66f88b-8r9wr" Oct 01 15:57:22 crc kubenswrapper[4869]: I1001 15:57:22.900699 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-676c66f88b-8r9wr" event={"ID":"c0f1c013-9664-4846-8576-8a9a26c64dad","Type":"ContainerDied","Data":"08443d0df1fb1c49c0a76691ad8df71c97795e8a18eb345aef12035fac6fb39f"} Oct 01 15:57:22 crc kubenswrapper[4869]: I1001 15:57:22.900747 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-676c66f88b-8r9wr" event={"ID":"c0f1c013-9664-4846-8576-8a9a26c64dad","Type":"ContainerDied","Data":"a5d46c0d49426f951873a559d933817635be5f5aab8c5e38d1894f6e0f69a661"} Oct 01 15:57:22 crc kubenswrapper[4869]: I1001 15:57:22.900772 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-676c66f88b-8r9wr" event={"ID":"c0f1c013-9664-4846-8576-8a9a26c64dad","Type":"ContainerDied","Data":"75d18cebc5fc55fa6678aa4a2e427a29310e97a211d329ed8f5c78d0fa0da8e3"} Oct 01 15:57:22 crc kubenswrapper[4869]: I1001 15:57:22.900804 4869 scope.go:117] "RemoveContainer" containerID="08443d0df1fb1c49c0a76691ad8df71c97795e8a18eb345aef12035fac6fb39f" Oct 01 15:57:22 crc kubenswrapper[4869]: I1001 15:57:22.934821 4869 scope.go:117] "RemoveContainer" containerID="a5d46c0d49426f951873a559d933817635be5f5aab8c5e38d1894f6e0f69a661" Oct 01 15:57:22 crc kubenswrapper[4869]: I1001 15:57:22.966058 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-676c66f88b-8r9wr"] Oct 01 15:57:22 crc kubenswrapper[4869]: I1001 15:57:22.980596 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-676c66f88b-8r9wr"] Oct 01 15:57:22 crc kubenswrapper[4869]: I1001 15:57:22.991056 4869 scope.go:117] "RemoveContainer" containerID="08443d0df1fb1c49c0a76691ad8df71c97795e8a18eb345aef12035fac6fb39f" Oct 01 15:57:22 crc kubenswrapper[4869]: E1001 15:57:22.991766 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"08443d0df1fb1c49c0a76691ad8df71c97795e8a18eb345aef12035fac6fb39f\": container with ID starting with 08443d0df1fb1c49c0a76691ad8df71c97795e8a18eb345aef12035fac6fb39f not found: ID does not exist" containerID="08443d0df1fb1c49c0a76691ad8df71c97795e8a18eb345aef12035fac6fb39f" Oct 01 15:57:22 crc kubenswrapper[4869]: I1001 15:57:22.991822 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"08443d0df1fb1c49c0a76691ad8df71c97795e8a18eb345aef12035fac6fb39f"} err="failed to get container status \"08443d0df1fb1c49c0a76691ad8df71c97795e8a18eb345aef12035fac6fb39f\": rpc error: code = NotFound desc = could not find container \"08443d0df1fb1c49c0a76691ad8df71c97795e8a18eb345aef12035fac6fb39f\": container with ID starting with 08443d0df1fb1c49c0a76691ad8df71c97795e8a18eb345aef12035fac6fb39f not found: ID does not exist" Oct 01 15:57:22 crc kubenswrapper[4869]: I1001 15:57:22.991858 4869 scope.go:117] "RemoveContainer" containerID="a5d46c0d49426f951873a559d933817635be5f5aab8c5e38d1894f6e0f69a661" Oct 01 15:57:22 crc kubenswrapper[4869]: E1001 15:57:22.992383 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"a5d46c0d49426f951873a559d933817635be5f5aab8c5e38d1894f6e0f69a661\": container with ID starting with a5d46c0d49426f951873a559d933817635be5f5aab8c5e38d1894f6e0f69a661 not found: ID does not exist" containerID="a5d46c0d49426f951873a559d933817635be5f5aab8c5e38d1894f6e0f69a661" Oct 01 15:57:22 crc kubenswrapper[4869]: I1001 15:57:22.992418 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a5d46c0d49426f951873a559d933817635be5f5aab8c5e38d1894f6e0f69a661"} err="failed to get container status \"a5d46c0d49426f951873a559d933817635be5f5aab8c5e38d1894f6e0f69a661\": rpc error: code = NotFound desc = could not find container \"a5d46c0d49426f951873a559d933817635be5f5aab8c5e38d1894f6e0f69a661\": container with ID starting with a5d46c0d49426f951873a559d933817635be5f5aab8c5e38d1894f6e0f69a661 not found: ID does not exist" Oct 01 15:57:22 crc kubenswrapper[4869]: I1001 15:57:22.992443 4869 scope.go:117] "RemoveContainer" containerID="08443d0df1fb1c49c0a76691ad8df71c97795e8a18eb345aef12035fac6fb39f" Oct 01 15:57:22 crc kubenswrapper[4869]: I1001 15:57:22.992751 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"08443d0df1fb1c49c0a76691ad8df71c97795e8a18eb345aef12035fac6fb39f"} err="failed to get container status \"08443d0df1fb1c49c0a76691ad8df71c97795e8a18eb345aef12035fac6fb39f\": rpc error: code = NotFound desc = could not find container \"08443d0df1fb1c49c0a76691ad8df71c97795e8a18eb345aef12035fac6fb39f\": container with ID starting with 08443d0df1fb1c49c0a76691ad8df71c97795e8a18eb345aef12035fac6fb39f not found: ID does not exist" Oct 01 15:57:22 crc kubenswrapper[4869]: I1001 15:57:22.992791 4869 scope.go:117] "RemoveContainer" containerID="a5d46c0d49426f951873a559d933817635be5f5aab8c5e38d1894f6e0f69a661" Oct 01 15:57:22 crc kubenswrapper[4869]: I1001 15:57:22.993128 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a5d46c0d49426f951873a559d933817635be5f5aab8c5e38d1894f6e0f69a661"} err="failed to get container status \"a5d46c0d49426f951873a559d933817635be5f5aab8c5e38d1894f6e0f69a661\": rpc error: code = NotFound desc = could not find container \"a5d46c0d49426f951873a559d933817635be5f5aab8c5e38d1894f6e0f69a661\": container with ID starting with a5d46c0d49426f951873a559d933817635be5f5aab8c5e38d1894f6e0f69a661 not found: ID does not exist" Oct 01 15:57:23 crc kubenswrapper[4869]: I1001 15:57:23.598654 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c0f1c013-9664-4846-8576-8a9a26c64dad" path="/var/lib/kubelet/pods/c0f1c013-9664-4846-8576-8a9a26c64dad/volumes" Oct 01 15:57:54 crc kubenswrapper[4869]: I1001 15:57:54.763518 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-788d856b94-q2j77"] Oct 01 15:57:54 crc kubenswrapper[4869]: E1001 15:57:54.765116 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0f1c013-9664-4846-8576-8a9a26c64dad" containerName="operator" Oct 01 15:57:54 crc kubenswrapper[4869]: I1001 15:57:54.765151 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0f1c013-9664-4846-8576-8a9a26c64dad" containerName="operator" Oct 01 15:57:54 crc kubenswrapper[4869]: E1001 15:57:54.765202 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0f1c013-9664-4846-8576-8a9a26c64dad" containerName="kube-rbac-proxy" Oct 01 15:57:54 crc kubenswrapper[4869]: I1001 15:57:54.765219 4869 state_mem.go:107] "Deleted CPUSet 
assignment" podUID="c0f1c013-9664-4846-8576-8a9a26c64dad" containerName="kube-rbac-proxy" Oct 01 15:57:54 crc kubenswrapper[4869]: I1001 15:57:54.765763 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="c0f1c013-9664-4846-8576-8a9a26c64dad" containerName="kube-rbac-proxy" Oct 01 15:57:54 crc kubenswrapper[4869]: I1001 15:57:54.765856 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="c0f1c013-9664-4846-8576-8a9a26c64dad" containerName="operator" Oct 01 15:57:54 crc kubenswrapper[4869]: I1001 15:57:54.768171 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-788d856b94-q2j77" Oct 01 15:57:54 crc kubenswrapper[4869]: I1001 15:57:54.781890 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-788d856b94-q2j77"] Oct 01 15:57:54 crc kubenswrapper[4869]: I1001 15:57:54.883563 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8z8jk\" (UniqueName: \"kubernetes.io/projected/15d016d4-52ca-4f58-8ccc-388d070c739c-kube-api-access-8z8jk\") pod \"test-operator-controller-manager-788d856b94-q2j77\" (UID: \"15d016d4-52ca-4f58-8ccc-388d070c739c\") " pod="openstack-operators/test-operator-controller-manager-788d856b94-q2j77" Oct 01 15:57:54 crc kubenswrapper[4869]: I1001 15:57:54.985925 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8z8jk\" (UniqueName: \"kubernetes.io/projected/15d016d4-52ca-4f58-8ccc-388d070c739c-kube-api-access-8z8jk\") pod \"test-operator-controller-manager-788d856b94-q2j77\" (UID: \"15d016d4-52ca-4f58-8ccc-388d070c739c\") " pod="openstack-operators/test-operator-controller-manager-788d856b94-q2j77" Oct 01 15:57:55 crc kubenswrapper[4869]: I1001 15:57:55.013767 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8z8jk\" (UniqueName: \"kubernetes.io/projected/15d016d4-52ca-4f58-8ccc-388d070c739c-kube-api-access-8z8jk\") pod \"test-operator-controller-manager-788d856b94-q2j77\" (UID: \"15d016d4-52ca-4f58-8ccc-388d070c739c\") " pod="openstack-operators/test-operator-controller-manager-788d856b94-q2j77" Oct 01 15:57:55 crc kubenswrapper[4869]: I1001 15:57:55.100246 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/test-operator-controller-manager-788d856b94-q2j77" Oct 01 15:57:55 crc kubenswrapper[4869]: I1001 15:57:55.454704 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-788d856b94-q2j77"] Oct 01 15:57:55 crc kubenswrapper[4869]: W1001 15:57:55.488866 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod15d016d4_52ca_4f58_8ccc_388d070c739c.slice/crio-56ad0027603b63733d50d39bcec3711522d8ce7226bb42ba294818d44b996ccc WatchSource:0}: Error finding container 56ad0027603b63733d50d39bcec3711522d8ce7226bb42ba294818d44b996ccc: Status 404 returned error can't find the container with id 56ad0027603b63733d50d39bcec3711522d8ce7226bb42ba294818d44b996ccc Oct 01 15:57:56 crc kubenswrapper[4869]: I1001 15:57:56.290294 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-788d856b94-q2j77" event={"ID":"15d016d4-52ca-4f58-8ccc-388d070c739c","Type":"ContainerStarted","Data":"56ad0027603b63733d50d39bcec3711522d8ce7226bb42ba294818d44b996ccc"} Oct 01 15:57:58 crc kubenswrapper[4869]: I1001 15:57:58.316612 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-788d856b94-q2j77" event={"ID":"15d016d4-52ca-4f58-8ccc-388d070c739c","Type":"ContainerStarted","Data":"e9b009c9bf61e028901cf321b0d5aaa33a6e371fafdcefaab2bbef9db6098c29"} Oct 01 15:57:58 crc kubenswrapper[4869]: I1001 15:57:58.317176 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-788d856b94-q2j77" event={"ID":"15d016d4-52ca-4f58-8ccc-388d070c739c","Type":"ContainerStarted","Data":"b3a16735b8255bd87689f48c98a5c576fb37f41a21fe7302b4e5d5bbd8429bac"} Oct 01 15:57:58 crc kubenswrapper[4869]: I1001 15:57:58.317273 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-788d856b94-q2j77" Oct 01 15:57:58 crc kubenswrapper[4869]: I1001 15:57:58.338374 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/test-operator-controller-manager-788d856b94-q2j77" podStartSLOduration=2.198572538 podStartE2EDuration="4.33835875s" podCreationTimestamp="2025-10-01 15:57:54 +0000 UTC" firstStartedPulling="2025-10-01 15:57:55.492519446 +0000 UTC m=+3184.639362572" lastFinishedPulling="2025-10-01 15:57:57.632305628 +0000 UTC m=+3186.779148784" observedRunningTime="2025-10-01 15:57:58.333706653 +0000 UTC m=+3187.480549769" watchObservedRunningTime="2025-10-01 15:57:58.33835875 +0000 UTC m=+3187.485201866" Oct 01 15:58:05 crc kubenswrapper[4869]: I1001 15:58:05.104652 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/test-operator-controller-manager-788d856b94-q2j77" Oct 01 15:58:05 crc kubenswrapper[4869]: I1001 15:58:05.191933 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/test-operator-controller-manager-cbdf6dc66-n8xn2"] Oct 01 15:58:05 crc kubenswrapper[4869]: I1001 15:58:05.192185 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/test-operator-controller-manager-cbdf6dc66-n8xn2" podUID="a74ba78b-be87-40e4-a1a1-f59a10612f6c" containerName="kube-rbac-proxy" containerID="cri-o://c73c4c225544f48d834cb5c560b25dbd1a12d0dfffe68b2a15940950355683fb" gracePeriod=10 Oct 01 15:58:05 
crc kubenswrapper[4869]: I1001 15:58:05.192253 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/test-operator-controller-manager-cbdf6dc66-n8xn2" podUID="a74ba78b-be87-40e4-a1a1-f59a10612f6c" containerName="manager" containerID="cri-o://e2593693f67975a92dcd05606c0ffeb91344e76a150930ad2763aeda80e3eae0" gracePeriod=10 Oct 01 15:58:05 crc kubenswrapper[4869]: I1001 15:58:05.402169 4869 generic.go:334] "Generic (PLEG): container finished" podID="a74ba78b-be87-40e4-a1a1-f59a10612f6c" containerID="e2593693f67975a92dcd05606c0ffeb91344e76a150930ad2763aeda80e3eae0" exitCode=0 Oct 01 15:58:05 crc kubenswrapper[4869]: I1001 15:58:05.402591 4869 generic.go:334] "Generic (PLEG): container finished" podID="a74ba78b-be87-40e4-a1a1-f59a10612f6c" containerID="c73c4c225544f48d834cb5c560b25dbd1a12d0dfffe68b2a15940950355683fb" exitCode=0 Oct 01 15:58:05 crc kubenswrapper[4869]: I1001 15:58:05.402288 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-cbdf6dc66-n8xn2" event={"ID":"a74ba78b-be87-40e4-a1a1-f59a10612f6c","Type":"ContainerDied","Data":"e2593693f67975a92dcd05606c0ffeb91344e76a150930ad2763aeda80e3eae0"} Oct 01 15:58:05 crc kubenswrapper[4869]: I1001 15:58:05.402664 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-cbdf6dc66-n8xn2" event={"ID":"a74ba78b-be87-40e4-a1a1-f59a10612f6c","Type":"ContainerDied","Data":"c73c4c225544f48d834cb5c560b25dbd1a12d0dfffe68b2a15940950355683fb"} Oct 01 15:58:05 crc kubenswrapper[4869]: I1001 15:58:05.857696 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-cbdf6dc66-n8xn2" Oct 01 15:58:05 crc kubenswrapper[4869]: I1001 15:58:05.947189 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pmtfg\" (UniqueName: \"kubernetes.io/projected/a74ba78b-be87-40e4-a1a1-f59a10612f6c-kube-api-access-pmtfg\") pod \"a74ba78b-be87-40e4-a1a1-f59a10612f6c\" (UID: \"a74ba78b-be87-40e4-a1a1-f59a10612f6c\") " Oct 01 15:58:05 crc kubenswrapper[4869]: I1001 15:58:05.953886 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a74ba78b-be87-40e4-a1a1-f59a10612f6c-kube-api-access-pmtfg" (OuterVolumeSpecName: "kube-api-access-pmtfg") pod "a74ba78b-be87-40e4-a1a1-f59a10612f6c" (UID: "a74ba78b-be87-40e4-a1a1-f59a10612f6c"). InnerVolumeSpecName "kube-api-access-pmtfg". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:58:06 crc kubenswrapper[4869]: I1001 15:58:06.050057 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pmtfg\" (UniqueName: \"kubernetes.io/projected/a74ba78b-be87-40e4-a1a1-f59a10612f6c-kube-api-access-pmtfg\") on node \"crc\" DevicePath \"\"" Oct 01 15:58:06 crc kubenswrapper[4869]: I1001 15:58:06.431016 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-cbdf6dc66-n8xn2" event={"ID":"a74ba78b-be87-40e4-a1a1-f59a10612f6c","Type":"ContainerDied","Data":"a3f6f0ad2a1c0177738beeccd7dab29ff55a89e62e80a70d3efabc3b7f60a976"} Oct 01 15:58:06 crc kubenswrapper[4869]: I1001 15:58:06.431099 4869 scope.go:117] "RemoveContainer" containerID="e2593693f67975a92dcd05606c0ffeb91344e76a150930ad2763aeda80e3eae0" Oct 01 15:58:06 crc kubenswrapper[4869]: I1001 15:58:06.431130 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/test-operator-controller-manager-cbdf6dc66-n8xn2" Oct 01 15:58:06 crc kubenswrapper[4869]: I1001 15:58:06.479705 4869 scope.go:117] "RemoveContainer" containerID="c73c4c225544f48d834cb5c560b25dbd1a12d0dfffe68b2a15940950355683fb" Oct 01 15:58:06 crc kubenswrapper[4869]: I1001 15:58:06.483244 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/test-operator-controller-manager-cbdf6dc66-n8xn2"] Oct 01 15:58:06 crc kubenswrapper[4869]: I1001 15:58:06.498107 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/test-operator-controller-manager-cbdf6dc66-n8xn2"] Oct 01 15:58:07 crc kubenswrapper[4869]: I1001 15:58:07.594451 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a74ba78b-be87-40e4-a1a1-f59a10612f6c" path="/var/lib/kubelet/pods/a74ba78b-be87-40e4-a1a1-f59a10612f6c/volumes" Oct 01 15:58:07 crc kubenswrapper[4869]: I1001 15:58:07.798780 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-bdz7q"] Oct 01 15:58:07 crc kubenswrapper[4869]: E1001 15:58:07.799524 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a74ba78b-be87-40e4-a1a1-f59a10612f6c" containerName="manager" Oct 01 15:58:07 crc kubenswrapper[4869]: I1001 15:58:07.799555 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="a74ba78b-be87-40e4-a1a1-f59a10612f6c" containerName="manager" Oct 01 15:58:07 crc kubenswrapper[4869]: E1001 15:58:07.799591 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a74ba78b-be87-40e4-a1a1-f59a10612f6c" containerName="kube-rbac-proxy" Oct 01 15:58:07 crc kubenswrapper[4869]: I1001 15:58:07.799608 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="a74ba78b-be87-40e4-a1a1-f59a10612f6c" containerName="kube-rbac-proxy" Oct 01 15:58:07 crc kubenswrapper[4869]: I1001 15:58:07.800069 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="a74ba78b-be87-40e4-a1a1-f59a10612f6c" containerName="manager" Oct 01 15:58:07 crc kubenswrapper[4869]: I1001 15:58:07.800125 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="a74ba78b-be87-40e4-a1a1-f59a10612f6c" containerName="kube-rbac-proxy" Oct 01 15:58:07 crc kubenswrapper[4869]: I1001 15:58:07.804189 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-bdz7q" Oct 01 15:58:07 crc kubenswrapper[4869]: I1001 15:58:07.813825 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-bdz7q"] Oct 01 15:58:07 crc kubenswrapper[4869]: I1001 15:58:07.890750 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d3f3b270-09ac-462f-8647-95b88e3fd687-utilities\") pod \"community-operators-bdz7q\" (UID: \"d3f3b270-09ac-462f-8647-95b88e3fd687\") " pod="openshift-marketplace/community-operators-bdz7q" Oct 01 15:58:07 crc kubenswrapper[4869]: I1001 15:58:07.890874 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sv4x9\" (UniqueName: \"kubernetes.io/projected/d3f3b270-09ac-462f-8647-95b88e3fd687-kube-api-access-sv4x9\") pod \"community-operators-bdz7q\" (UID: \"d3f3b270-09ac-462f-8647-95b88e3fd687\") " pod="openshift-marketplace/community-operators-bdz7q" Oct 01 15:58:07 crc kubenswrapper[4869]: I1001 15:58:07.890908 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d3f3b270-09ac-462f-8647-95b88e3fd687-catalog-content\") pod \"community-operators-bdz7q\" (UID: \"d3f3b270-09ac-462f-8647-95b88e3fd687\") " pod="openshift-marketplace/community-operators-bdz7q" Oct 01 15:58:07 crc kubenswrapper[4869]: I1001 15:58:07.993189 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d3f3b270-09ac-462f-8647-95b88e3fd687-catalog-content\") pod \"community-operators-bdz7q\" (UID: \"d3f3b270-09ac-462f-8647-95b88e3fd687\") " pod="openshift-marketplace/community-operators-bdz7q" Oct 01 15:58:07 crc kubenswrapper[4869]: I1001 15:58:07.993344 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d3f3b270-09ac-462f-8647-95b88e3fd687-utilities\") pod \"community-operators-bdz7q\" (UID: \"d3f3b270-09ac-462f-8647-95b88e3fd687\") " pod="openshift-marketplace/community-operators-bdz7q" Oct 01 15:58:07 crc kubenswrapper[4869]: I1001 15:58:07.993421 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sv4x9\" (UniqueName: \"kubernetes.io/projected/d3f3b270-09ac-462f-8647-95b88e3fd687-kube-api-access-sv4x9\") pod \"community-operators-bdz7q\" (UID: \"d3f3b270-09ac-462f-8647-95b88e3fd687\") " pod="openshift-marketplace/community-operators-bdz7q" Oct 01 15:58:07 crc kubenswrapper[4869]: I1001 15:58:07.993894 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d3f3b270-09ac-462f-8647-95b88e3fd687-catalog-content\") pod \"community-operators-bdz7q\" (UID: \"d3f3b270-09ac-462f-8647-95b88e3fd687\") " pod="openshift-marketplace/community-operators-bdz7q" Oct 01 15:58:07 crc kubenswrapper[4869]: I1001 15:58:07.994008 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d3f3b270-09ac-462f-8647-95b88e3fd687-utilities\") pod \"community-operators-bdz7q\" (UID: \"d3f3b270-09ac-462f-8647-95b88e3fd687\") " pod="openshift-marketplace/community-operators-bdz7q" Oct 01 15:58:08 crc kubenswrapper[4869]: I1001 15:58:08.024786 4869 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-sv4x9\" (UniqueName: \"kubernetes.io/projected/d3f3b270-09ac-462f-8647-95b88e3fd687-kube-api-access-sv4x9\") pod \"community-operators-bdz7q\" (UID: \"d3f3b270-09ac-462f-8647-95b88e3fd687\") " pod="openshift-marketplace/community-operators-bdz7q" Oct 01 15:58:08 crc kubenswrapper[4869]: I1001 15:58:08.146221 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-bdz7q" Oct 01 15:58:08 crc kubenswrapper[4869]: I1001 15:58:08.708370 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-bdz7q"] Oct 01 15:58:08 crc kubenswrapper[4869]: W1001 15:58:08.722625 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd3f3b270_09ac_462f_8647_95b88e3fd687.slice/crio-dc6629750876c529d36c5638dad5588837259255686bbcf0cc04b9241f81ed56 WatchSource:0}: Error finding container dc6629750876c529d36c5638dad5588837259255686bbcf0cc04b9241f81ed56: Status 404 returned error can't find the container with id dc6629750876c529d36c5638dad5588837259255686bbcf0cc04b9241f81ed56 Oct 01 15:58:09 crc kubenswrapper[4869]: I1001 15:58:09.461831 4869 generic.go:334] "Generic (PLEG): container finished" podID="d3f3b270-09ac-462f-8647-95b88e3fd687" containerID="0bdfec0ff1c9bebb412799536f10cf05e7ae80246f3d017bcf26a3c4c030f0d3" exitCode=0 Oct 01 15:58:09 crc kubenswrapper[4869]: I1001 15:58:09.461886 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bdz7q" event={"ID":"d3f3b270-09ac-462f-8647-95b88e3fd687","Type":"ContainerDied","Data":"0bdfec0ff1c9bebb412799536f10cf05e7ae80246f3d017bcf26a3c4c030f0d3"} Oct 01 15:58:09 crc kubenswrapper[4869]: I1001 15:58:09.463193 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bdz7q" event={"ID":"d3f3b270-09ac-462f-8647-95b88e3fd687","Type":"ContainerStarted","Data":"dc6629750876c529d36c5638dad5588837259255686bbcf0cc04b9241f81ed56"} Oct 01 15:58:11 crc kubenswrapper[4869]: I1001 15:58:11.486118 4869 generic.go:334] "Generic (PLEG): container finished" podID="d3f3b270-09ac-462f-8647-95b88e3fd687" containerID="2b0b37041603c512a1f46e5e20811ddee1ee7bbaeb404a502361f6f73786377b" exitCode=0 Oct 01 15:58:11 crc kubenswrapper[4869]: I1001 15:58:11.486560 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bdz7q" event={"ID":"d3f3b270-09ac-462f-8647-95b88e3fd687","Type":"ContainerDied","Data":"2b0b37041603c512a1f46e5e20811ddee1ee7bbaeb404a502361f6f73786377b"} Oct 01 15:58:12 crc kubenswrapper[4869]: I1001 15:58:12.500112 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bdz7q" event={"ID":"d3f3b270-09ac-462f-8647-95b88e3fd687","Type":"ContainerStarted","Data":"69dcbeb5a4f091e50027acc72c17a1a582bf05df1f076484f7558ff3ac77a15c"} Oct 01 15:58:12 crc kubenswrapper[4869]: I1001 15:58:12.536093 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-bdz7q" podStartSLOduration=3.033897547 podStartE2EDuration="5.536070571s" podCreationTimestamp="2025-10-01 15:58:07 +0000 UTC" firstStartedPulling="2025-10-01 15:58:09.464246926 +0000 UTC m=+3198.611090072" lastFinishedPulling="2025-10-01 15:58:11.96641997 +0000 UTC m=+3201.113263096" observedRunningTime="2025-10-01 15:58:12.524001466 +0000 UTC 
m=+3201.670844582" watchObservedRunningTime="2025-10-01 15:58:12.536070571 +0000 UTC m=+3201.682913697" Oct 01 15:58:18 crc kubenswrapper[4869]: I1001 15:58:18.146657 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-bdz7q" Oct 01 15:58:18 crc kubenswrapper[4869]: I1001 15:58:18.147489 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-bdz7q" Oct 01 15:58:18 crc kubenswrapper[4869]: I1001 15:58:18.221586 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-bdz7q" Oct 01 15:58:18 crc kubenswrapper[4869]: I1001 15:58:18.620189 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-bdz7q" Oct 01 15:58:18 crc kubenswrapper[4869]: I1001 15:58:18.689051 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-bdz7q"] Oct 01 15:58:20 crc kubenswrapper[4869]: I1001 15:58:20.583126 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-bdz7q" podUID="d3f3b270-09ac-462f-8647-95b88e3fd687" containerName="registry-server" containerID="cri-o://69dcbeb5a4f091e50027acc72c17a1a582bf05df1f076484f7558ff3ac77a15c" gracePeriod=2 Oct 01 15:58:21 crc kubenswrapper[4869]: I1001 15:58:21.149588 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-bdz7q" Oct 01 15:58:21 crc kubenswrapper[4869]: I1001 15:58:21.273905 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d3f3b270-09ac-462f-8647-95b88e3fd687-catalog-content\") pod \"d3f3b270-09ac-462f-8647-95b88e3fd687\" (UID: \"d3f3b270-09ac-462f-8647-95b88e3fd687\") " Oct 01 15:58:21 crc kubenswrapper[4869]: I1001 15:58:21.274124 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d3f3b270-09ac-462f-8647-95b88e3fd687-utilities\") pod \"d3f3b270-09ac-462f-8647-95b88e3fd687\" (UID: \"d3f3b270-09ac-462f-8647-95b88e3fd687\") " Oct 01 15:58:21 crc kubenswrapper[4869]: I1001 15:58:21.274168 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sv4x9\" (UniqueName: \"kubernetes.io/projected/d3f3b270-09ac-462f-8647-95b88e3fd687-kube-api-access-sv4x9\") pod \"d3f3b270-09ac-462f-8647-95b88e3fd687\" (UID: \"d3f3b270-09ac-462f-8647-95b88e3fd687\") " Oct 01 15:58:21 crc kubenswrapper[4869]: I1001 15:58:21.280888 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d3f3b270-09ac-462f-8647-95b88e3fd687-utilities" (OuterVolumeSpecName: "utilities") pod "d3f3b270-09ac-462f-8647-95b88e3fd687" (UID: "d3f3b270-09ac-462f-8647-95b88e3fd687"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 15:58:21 crc kubenswrapper[4869]: I1001 15:58:21.281326 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d3f3b270-09ac-462f-8647-95b88e3fd687-kube-api-access-sv4x9" (OuterVolumeSpecName: "kube-api-access-sv4x9") pod "d3f3b270-09ac-462f-8647-95b88e3fd687" (UID: "d3f3b270-09ac-462f-8647-95b88e3fd687"). InnerVolumeSpecName "kube-api-access-sv4x9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 15:58:21 crc kubenswrapper[4869]: I1001 15:58:21.386663 4869 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d3f3b270-09ac-462f-8647-95b88e3fd687-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 15:58:21 crc kubenswrapper[4869]: I1001 15:58:21.386709 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sv4x9\" (UniqueName: \"kubernetes.io/projected/d3f3b270-09ac-462f-8647-95b88e3fd687-kube-api-access-sv4x9\") on node \"crc\" DevicePath \"\"" Oct 01 15:58:21 crc kubenswrapper[4869]: I1001 15:58:21.594764 4869 generic.go:334] "Generic (PLEG): container finished" podID="d3f3b270-09ac-462f-8647-95b88e3fd687" containerID="69dcbeb5a4f091e50027acc72c17a1a582bf05df1f076484f7558ff3ac77a15c" exitCode=0 Oct 01 15:58:21 crc kubenswrapper[4869]: I1001 15:58:21.594862 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-bdz7q" Oct 01 15:58:21 crc kubenswrapper[4869]: I1001 15:58:21.603511 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bdz7q" event={"ID":"d3f3b270-09ac-462f-8647-95b88e3fd687","Type":"ContainerDied","Data":"69dcbeb5a4f091e50027acc72c17a1a582bf05df1f076484f7558ff3ac77a15c"} Oct 01 15:58:21 crc kubenswrapper[4869]: I1001 15:58:21.603552 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bdz7q" event={"ID":"d3f3b270-09ac-462f-8647-95b88e3fd687","Type":"ContainerDied","Data":"dc6629750876c529d36c5638dad5588837259255686bbcf0cc04b9241f81ed56"} Oct 01 15:58:21 crc kubenswrapper[4869]: I1001 15:58:21.603572 4869 scope.go:117] "RemoveContainer" containerID="69dcbeb5a4f091e50027acc72c17a1a582bf05df1f076484f7558ff3ac77a15c" Oct 01 15:58:21 crc kubenswrapper[4869]: I1001 15:58:21.640744 4869 scope.go:117] "RemoveContainer" containerID="2b0b37041603c512a1f46e5e20811ddee1ee7bbaeb404a502361f6f73786377b" Oct 01 15:58:21 crc kubenswrapper[4869]: I1001 15:58:21.685320 4869 scope.go:117] "RemoveContainer" containerID="0bdfec0ff1c9bebb412799536f10cf05e7ae80246f3d017bcf26a3c4c030f0d3" Oct 01 15:58:21 crc kubenswrapper[4869]: I1001 15:58:21.728393 4869 scope.go:117] "RemoveContainer" containerID="69dcbeb5a4f091e50027acc72c17a1a582bf05df1f076484f7558ff3ac77a15c" Oct 01 15:58:21 crc kubenswrapper[4869]: E1001 15:58:21.729000 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"69dcbeb5a4f091e50027acc72c17a1a582bf05df1f076484f7558ff3ac77a15c\": container with ID starting with 69dcbeb5a4f091e50027acc72c17a1a582bf05df1f076484f7558ff3ac77a15c not found: ID does not exist" containerID="69dcbeb5a4f091e50027acc72c17a1a582bf05df1f076484f7558ff3ac77a15c" Oct 01 15:58:21 crc kubenswrapper[4869]: I1001 15:58:21.729035 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"69dcbeb5a4f091e50027acc72c17a1a582bf05df1f076484f7558ff3ac77a15c"} err="failed to get container status \"69dcbeb5a4f091e50027acc72c17a1a582bf05df1f076484f7558ff3ac77a15c\": rpc error: code = NotFound desc = could not find container \"69dcbeb5a4f091e50027acc72c17a1a582bf05df1f076484f7558ff3ac77a15c\": container with ID starting with 69dcbeb5a4f091e50027acc72c17a1a582bf05df1f076484f7558ff3ac77a15c not found: ID does not exist" Oct 01 15:58:21 crc kubenswrapper[4869]: I1001 15:58:21.729060 4869 scope.go:117] 
"RemoveContainer" containerID="2b0b37041603c512a1f46e5e20811ddee1ee7bbaeb404a502361f6f73786377b" Oct 01 15:58:21 crc kubenswrapper[4869]: E1001 15:58:21.729393 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2b0b37041603c512a1f46e5e20811ddee1ee7bbaeb404a502361f6f73786377b\": container with ID starting with 2b0b37041603c512a1f46e5e20811ddee1ee7bbaeb404a502361f6f73786377b not found: ID does not exist" containerID="2b0b37041603c512a1f46e5e20811ddee1ee7bbaeb404a502361f6f73786377b" Oct 01 15:58:21 crc kubenswrapper[4869]: I1001 15:58:21.729492 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2b0b37041603c512a1f46e5e20811ddee1ee7bbaeb404a502361f6f73786377b"} err="failed to get container status \"2b0b37041603c512a1f46e5e20811ddee1ee7bbaeb404a502361f6f73786377b\": rpc error: code = NotFound desc = could not find container \"2b0b37041603c512a1f46e5e20811ddee1ee7bbaeb404a502361f6f73786377b\": container with ID starting with 2b0b37041603c512a1f46e5e20811ddee1ee7bbaeb404a502361f6f73786377b not found: ID does not exist" Oct 01 15:58:21 crc kubenswrapper[4869]: I1001 15:58:21.729586 4869 scope.go:117] "RemoveContainer" containerID="0bdfec0ff1c9bebb412799536f10cf05e7ae80246f3d017bcf26a3c4c030f0d3" Oct 01 15:58:21 crc kubenswrapper[4869]: E1001 15:58:21.730139 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0bdfec0ff1c9bebb412799536f10cf05e7ae80246f3d017bcf26a3c4c030f0d3\": container with ID starting with 0bdfec0ff1c9bebb412799536f10cf05e7ae80246f3d017bcf26a3c4c030f0d3 not found: ID does not exist" containerID="0bdfec0ff1c9bebb412799536f10cf05e7ae80246f3d017bcf26a3c4c030f0d3" Oct 01 15:58:21 crc kubenswrapper[4869]: I1001 15:58:21.730168 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0bdfec0ff1c9bebb412799536f10cf05e7ae80246f3d017bcf26a3c4c030f0d3"} err="failed to get container status \"0bdfec0ff1c9bebb412799536f10cf05e7ae80246f3d017bcf26a3c4c030f0d3\": rpc error: code = NotFound desc = could not find container \"0bdfec0ff1c9bebb412799536f10cf05e7ae80246f3d017bcf26a3c4c030f0d3\": container with ID starting with 0bdfec0ff1c9bebb412799536f10cf05e7ae80246f3d017bcf26a3c4c030f0d3 not found: ID does not exist" Oct 01 15:58:21 crc kubenswrapper[4869]: I1001 15:58:21.877470 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d3f3b270-09ac-462f-8647-95b88e3fd687-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d3f3b270-09ac-462f-8647-95b88e3fd687" (UID: "d3f3b270-09ac-462f-8647-95b88e3fd687"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 15:58:21 crc kubenswrapper[4869]: I1001 15:58:21.897091 4869 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d3f3b270-09ac-462f-8647-95b88e3fd687-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 15:58:21 crc kubenswrapper[4869]: I1001 15:58:21.939844 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-bdz7q"] Oct 01 15:58:21 crc kubenswrapper[4869]: I1001 15:58:21.950128 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-bdz7q"] Oct 01 15:58:23 crc kubenswrapper[4869]: I1001 15:58:23.597520 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d3f3b270-09ac-462f-8647-95b88e3fd687" path="/var/lib/kubelet/pods/d3f3b270-09ac-462f-8647-95b88e3fd687/volumes" Oct 01 15:59:13 crc kubenswrapper[4869]: I1001 15:59:13.354425 4869 patch_prober.go:28] interesting pod/machine-config-daemon-c86m8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 15:59:13 crc kubenswrapper[4869]: I1001 15:59:13.355252 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 15:59:43 crc kubenswrapper[4869]: I1001 15:59:43.353852 4869 patch_prober.go:28] interesting pod/machine-config-daemon-c86m8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 15:59:43 crc kubenswrapper[4869]: I1001 15:59:43.354465 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 15:59:49 crc kubenswrapper[4869]: I1001 15:59:49.294654 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/tempest-tests-tempest-s00-full"] Oct 01 15:59:49 crc kubenswrapper[4869]: E1001 15:59:49.295674 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d3f3b270-09ac-462f-8647-95b88e3fd687" containerName="extract-content" Oct 01 15:59:49 crc kubenswrapper[4869]: I1001 15:59:49.295691 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="d3f3b270-09ac-462f-8647-95b88e3fd687" containerName="extract-content" Oct 01 15:59:49 crc kubenswrapper[4869]: E1001 15:59:49.295712 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d3f3b270-09ac-462f-8647-95b88e3fd687" containerName="registry-server" Oct 01 15:59:49 crc kubenswrapper[4869]: I1001 15:59:49.295720 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="d3f3b270-09ac-462f-8647-95b88e3fd687" containerName="registry-server" Oct 01 15:59:49 crc kubenswrapper[4869]: E1001 15:59:49.295750 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d3f3b270-09ac-462f-8647-95b88e3fd687" 
containerName="extract-utilities" Oct 01 15:59:49 crc kubenswrapper[4869]: I1001 15:59:49.295760 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="d3f3b270-09ac-462f-8647-95b88e3fd687" containerName="extract-utilities" Oct 01 15:59:49 crc kubenswrapper[4869]: I1001 15:59:49.295970 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="d3f3b270-09ac-462f-8647-95b88e3fd687" containerName="registry-server" Oct 01 15:59:49 crc kubenswrapper[4869]: I1001 15:59:49.296768 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest-s00-full" Oct 01 15:59:49 crc kubenswrapper[4869]: I1001 15:59:49.298893 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"test-operator-controller-priv-key" Oct 01 15:59:49 crc kubenswrapper[4869]: I1001 15:59:49.299411 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-rnrsn" Oct 01 15:59:49 crc kubenswrapper[4869]: I1001 15:59:49.300690 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-custom-data-s0" Oct 01 15:59:49 crc kubenswrapper[4869]: I1001 15:59:49.300890 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0" Oct 01 15:59:49 crc kubenswrapper[4869]: I1001 15:59:49.312319 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest-s00-full"] Oct 01 15:59:49 crc kubenswrapper[4869]: I1001 15:59:49.320405 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/78543bf0-aa4b-45dc-a7c6-37a22a5be6ea-openstack-config-secret\") pod \"tempest-tests-tempest-s00-full\" (UID: \"78543bf0-aa4b-45dc-a7c6-37a22a5be6ea\") " pod="openstack/tempest-tests-tempest-s00-full" Oct 01 15:59:49 crc kubenswrapper[4869]: I1001 15:59:49.320477 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/78543bf0-aa4b-45dc-a7c6-37a22a5be6ea-config-data\") pod \"tempest-tests-tempest-s00-full\" (UID: \"78543bf0-aa4b-45dc-a7c6-37a22a5be6ea\") " pod="openstack/tempest-tests-tempest-s00-full" Oct 01 15:59:49 crc kubenswrapper[4869]: I1001 15:59:49.320740 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/78543bf0-aa4b-45dc-a7c6-37a22a5be6ea-openstack-config\") pod \"tempest-tests-tempest-s00-full\" (UID: \"78543bf0-aa4b-45dc-a7c6-37a22a5be6ea\") " pod="openstack/tempest-tests-tempest-s00-full" Oct 01 15:59:49 crc kubenswrapper[4869]: I1001 15:59:49.422500 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/78543bf0-aa4b-45dc-a7c6-37a22a5be6ea-ssh-key\") pod \"tempest-tests-tempest-s00-full\" (UID: \"78543bf0-aa4b-45dc-a7c6-37a22a5be6ea\") " pod="openstack/tempest-tests-tempest-s00-full" Oct 01 15:59:49 crc kubenswrapper[4869]: I1001 15:59:49.422586 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/78543bf0-aa4b-45dc-a7c6-37a22a5be6ea-openstack-config\") pod \"tempest-tests-tempest-s00-full\" (UID: \"78543bf0-aa4b-45dc-a7c6-37a22a5be6ea\") " pod="openstack/tempest-tests-tempest-s00-full" Oct 01 15:59:49 crc 
kubenswrapper[4869]: I1001 15:59:49.422665 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"tempest-tests-tempest-s00-full\" (UID: \"78543bf0-aa4b-45dc-a7c6-37a22a5be6ea\") " pod="openstack/tempest-tests-tempest-s00-full" Oct 01 15:59:49 crc kubenswrapper[4869]: I1001 15:59:49.422716 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/78543bf0-aa4b-45dc-a7c6-37a22a5be6ea-ceph\") pod \"tempest-tests-tempest-s00-full\" (UID: \"78543bf0-aa4b-45dc-a7c6-37a22a5be6ea\") " pod="openstack/tempest-tests-tempest-s00-full" Oct 01 15:59:49 crc kubenswrapper[4869]: I1001 15:59:49.422957 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/78543bf0-aa4b-45dc-a7c6-37a22a5be6ea-ca-certs\") pod \"tempest-tests-tempest-s00-full\" (UID: \"78543bf0-aa4b-45dc-a7c6-37a22a5be6ea\") " pod="openstack/tempest-tests-tempest-s00-full" Oct 01 15:59:49 crc kubenswrapper[4869]: I1001 15:59:49.423307 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/78543bf0-aa4b-45dc-a7c6-37a22a5be6ea-openstack-config-secret\") pod \"tempest-tests-tempest-s00-full\" (UID: \"78543bf0-aa4b-45dc-a7c6-37a22a5be6ea\") " pod="openstack/tempest-tests-tempest-s00-full" Oct 01 15:59:49 crc kubenswrapper[4869]: I1001 15:59:49.423405 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/78543bf0-aa4b-45dc-a7c6-37a22a5be6ea-config-data\") pod \"tempest-tests-tempest-s00-full\" (UID: \"78543bf0-aa4b-45dc-a7c6-37a22a5be6ea\") " pod="openstack/tempest-tests-tempest-s00-full" Oct 01 15:59:49 crc kubenswrapper[4869]: I1001 15:59:49.423471 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/78543bf0-aa4b-45dc-a7c6-37a22a5be6ea-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest-s00-full\" (UID: \"78543bf0-aa4b-45dc-a7c6-37a22a5be6ea\") " pod="openstack/tempest-tests-tempest-s00-full" Oct 01 15:59:49 crc kubenswrapper[4869]: I1001 15:59:49.423664 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pmlps\" (UniqueName: \"kubernetes.io/projected/78543bf0-aa4b-45dc-a7c6-37a22a5be6ea-kube-api-access-pmlps\") pod \"tempest-tests-tempest-s00-full\" (UID: \"78543bf0-aa4b-45dc-a7c6-37a22a5be6ea\") " pod="openstack/tempest-tests-tempest-s00-full" Oct 01 15:59:49 crc kubenswrapper[4869]: I1001 15:59:49.423719 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/78543bf0-aa4b-45dc-a7c6-37a22a5be6ea-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest-s00-full\" (UID: \"78543bf0-aa4b-45dc-a7c6-37a22a5be6ea\") " pod="openstack/tempest-tests-tempest-s00-full" Oct 01 15:59:49 crc kubenswrapper[4869]: I1001 15:59:49.424647 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/78543bf0-aa4b-45dc-a7c6-37a22a5be6ea-openstack-config\") pod 
\"tempest-tests-tempest-s00-full\" (UID: \"78543bf0-aa4b-45dc-a7c6-37a22a5be6ea\") " pod="openstack/tempest-tests-tempest-s00-full" Oct 01 15:59:49 crc kubenswrapper[4869]: I1001 15:59:49.425698 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/78543bf0-aa4b-45dc-a7c6-37a22a5be6ea-config-data\") pod \"tempest-tests-tempest-s00-full\" (UID: \"78543bf0-aa4b-45dc-a7c6-37a22a5be6ea\") " pod="openstack/tempest-tests-tempest-s00-full" Oct 01 15:59:49 crc kubenswrapper[4869]: I1001 15:59:49.438729 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/78543bf0-aa4b-45dc-a7c6-37a22a5be6ea-openstack-config-secret\") pod \"tempest-tests-tempest-s00-full\" (UID: \"78543bf0-aa4b-45dc-a7c6-37a22a5be6ea\") " pod="openstack/tempest-tests-tempest-s00-full" Oct 01 15:59:49 crc kubenswrapper[4869]: I1001 15:59:49.525844 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/78543bf0-aa4b-45dc-a7c6-37a22a5be6ea-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest-s00-full\" (UID: \"78543bf0-aa4b-45dc-a7c6-37a22a5be6ea\") " pod="openstack/tempest-tests-tempest-s00-full" Oct 01 15:59:49 crc kubenswrapper[4869]: I1001 15:59:49.525999 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pmlps\" (UniqueName: \"kubernetes.io/projected/78543bf0-aa4b-45dc-a7c6-37a22a5be6ea-kube-api-access-pmlps\") pod \"tempest-tests-tempest-s00-full\" (UID: \"78543bf0-aa4b-45dc-a7c6-37a22a5be6ea\") " pod="openstack/tempest-tests-tempest-s00-full" Oct 01 15:59:49 crc kubenswrapper[4869]: I1001 15:59:49.526042 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/78543bf0-aa4b-45dc-a7c6-37a22a5be6ea-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest-s00-full\" (UID: \"78543bf0-aa4b-45dc-a7c6-37a22a5be6ea\") " pod="openstack/tempest-tests-tempest-s00-full" Oct 01 15:59:49 crc kubenswrapper[4869]: I1001 15:59:49.526096 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/78543bf0-aa4b-45dc-a7c6-37a22a5be6ea-ssh-key\") pod \"tempest-tests-tempest-s00-full\" (UID: \"78543bf0-aa4b-45dc-a7c6-37a22a5be6ea\") " pod="openstack/tempest-tests-tempest-s00-full" Oct 01 15:59:49 crc kubenswrapper[4869]: I1001 15:59:49.526229 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"tempest-tests-tempest-s00-full\" (UID: \"78543bf0-aa4b-45dc-a7c6-37a22a5be6ea\") " pod="openstack/tempest-tests-tempest-s00-full" Oct 01 15:59:49 crc kubenswrapper[4869]: I1001 15:59:49.526621 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/78543bf0-aa4b-45dc-a7c6-37a22a5be6ea-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest-s00-full\" (UID: \"78543bf0-aa4b-45dc-a7c6-37a22a5be6ea\") " pod="openstack/tempest-tests-tempest-s00-full" Oct 01 15:59:49 crc kubenswrapper[4869]: I1001 15:59:49.527072 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: 
\"kubernetes.io/secret/78543bf0-aa4b-45dc-a7c6-37a22a5be6ea-ceph\") pod \"tempest-tests-tempest-s00-full\" (UID: \"78543bf0-aa4b-45dc-a7c6-37a22a5be6ea\") " pod="openstack/tempest-tests-tempest-s00-full" Oct 01 15:59:49 crc kubenswrapper[4869]: I1001 15:59:49.527157 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/78543bf0-aa4b-45dc-a7c6-37a22a5be6ea-ca-certs\") pod \"tempest-tests-tempest-s00-full\" (UID: \"78543bf0-aa4b-45dc-a7c6-37a22a5be6ea\") " pod="openstack/tempest-tests-tempest-s00-full" Oct 01 15:59:49 crc kubenswrapper[4869]: I1001 15:59:49.527558 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/78543bf0-aa4b-45dc-a7c6-37a22a5be6ea-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest-s00-full\" (UID: \"78543bf0-aa4b-45dc-a7c6-37a22a5be6ea\") " pod="openstack/tempest-tests-tempest-s00-full" Oct 01 15:59:49 crc kubenswrapper[4869]: I1001 15:59:49.527579 4869 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"tempest-tests-tempest-s00-full\" (UID: \"78543bf0-aa4b-45dc-a7c6-37a22a5be6ea\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/tempest-tests-tempest-s00-full" Oct 01 15:59:49 crc kubenswrapper[4869]: I1001 15:59:49.531743 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/78543bf0-aa4b-45dc-a7c6-37a22a5be6ea-ceph\") pod \"tempest-tests-tempest-s00-full\" (UID: \"78543bf0-aa4b-45dc-a7c6-37a22a5be6ea\") " pod="openstack/tempest-tests-tempest-s00-full" Oct 01 15:59:49 crc kubenswrapper[4869]: I1001 15:59:49.531963 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/78543bf0-aa4b-45dc-a7c6-37a22a5be6ea-ssh-key\") pod \"tempest-tests-tempest-s00-full\" (UID: \"78543bf0-aa4b-45dc-a7c6-37a22a5be6ea\") " pod="openstack/tempest-tests-tempest-s00-full" Oct 01 15:59:49 crc kubenswrapper[4869]: I1001 15:59:49.535193 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/78543bf0-aa4b-45dc-a7c6-37a22a5be6ea-ca-certs\") pod \"tempest-tests-tempest-s00-full\" (UID: \"78543bf0-aa4b-45dc-a7c6-37a22a5be6ea\") " pod="openstack/tempest-tests-tempest-s00-full" Oct 01 15:59:49 crc kubenswrapper[4869]: I1001 15:59:49.551608 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pmlps\" (UniqueName: \"kubernetes.io/projected/78543bf0-aa4b-45dc-a7c6-37a22a5be6ea-kube-api-access-pmlps\") pod \"tempest-tests-tempest-s00-full\" (UID: \"78543bf0-aa4b-45dc-a7c6-37a22a5be6ea\") " pod="openstack/tempest-tests-tempest-s00-full" Oct 01 15:59:49 crc kubenswrapper[4869]: I1001 15:59:49.570518 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"tempest-tests-tempest-s00-full\" (UID: \"78543bf0-aa4b-45dc-a7c6-37a22a5be6ea\") " pod="openstack/tempest-tests-tempest-s00-full" Oct 01 15:59:49 crc kubenswrapper[4869]: I1001 15:59:49.640034 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/tempest-tests-tempest-s00-full" Oct 01 15:59:50 crc kubenswrapper[4869]: I1001 15:59:50.280151 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest-s00-full"] Oct 01 15:59:50 crc kubenswrapper[4869]: I1001 15:59:50.293076 4869 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 01 15:59:50 crc kubenswrapper[4869]: I1001 15:59:50.600717 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest-s00-full" event={"ID":"78543bf0-aa4b-45dc-a7c6-37a22a5be6ea","Type":"ContainerStarted","Data":"197b13cffaa032b4a9a053024f120c1da210f5edbf910d57274ab1206a907282"} Oct 01 16:00:00 crc kubenswrapper[4869]: I1001 16:00:00.156617 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29322240-s2nk4"] Oct 01 16:00:00 crc kubenswrapper[4869]: I1001 16:00:00.158691 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29322240-s2nk4" Oct 01 16:00:00 crc kubenswrapper[4869]: I1001 16:00:00.162733 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 01 16:00:00 crc kubenswrapper[4869]: I1001 16:00:00.163951 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 01 16:00:00 crc kubenswrapper[4869]: I1001 16:00:00.166029 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29322240-s2nk4"] Oct 01 16:00:00 crc kubenswrapper[4869]: I1001 16:00:00.265674 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9aae4936-0f51-48a4-8298-7583d486c6ee-secret-volume\") pod \"collect-profiles-29322240-s2nk4\" (UID: \"9aae4936-0f51-48a4-8298-7583d486c6ee\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322240-s2nk4" Oct 01 16:00:00 crc kubenswrapper[4869]: I1001 16:00:00.265803 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9aae4936-0f51-48a4-8298-7583d486c6ee-config-volume\") pod \"collect-profiles-29322240-s2nk4\" (UID: \"9aae4936-0f51-48a4-8298-7583d486c6ee\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322240-s2nk4" Oct 01 16:00:00 crc kubenswrapper[4869]: I1001 16:00:00.266426 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qmftk\" (UniqueName: \"kubernetes.io/projected/9aae4936-0f51-48a4-8298-7583d486c6ee-kube-api-access-qmftk\") pod \"collect-profiles-29322240-s2nk4\" (UID: \"9aae4936-0f51-48a4-8298-7583d486c6ee\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322240-s2nk4" Oct 01 16:00:00 crc kubenswrapper[4869]: I1001 16:00:00.369201 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qmftk\" (UniqueName: \"kubernetes.io/projected/9aae4936-0f51-48a4-8298-7583d486c6ee-kube-api-access-qmftk\") pod \"collect-profiles-29322240-s2nk4\" (UID: \"9aae4936-0f51-48a4-8298-7583d486c6ee\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322240-s2nk4" Oct 01 16:00:00 crc kubenswrapper[4869]: I1001 16:00:00.369995 4869 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9aae4936-0f51-48a4-8298-7583d486c6ee-secret-volume\") pod \"collect-profiles-29322240-s2nk4\" (UID: \"9aae4936-0f51-48a4-8298-7583d486c6ee\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322240-s2nk4" Oct 01 16:00:00 crc kubenswrapper[4869]: I1001 16:00:00.370078 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9aae4936-0f51-48a4-8298-7583d486c6ee-config-volume\") pod \"collect-profiles-29322240-s2nk4\" (UID: \"9aae4936-0f51-48a4-8298-7583d486c6ee\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322240-s2nk4" Oct 01 16:00:00 crc kubenswrapper[4869]: I1001 16:00:00.371735 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9aae4936-0f51-48a4-8298-7583d486c6ee-config-volume\") pod \"collect-profiles-29322240-s2nk4\" (UID: \"9aae4936-0f51-48a4-8298-7583d486c6ee\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322240-s2nk4" Oct 01 16:00:00 crc kubenswrapper[4869]: I1001 16:00:00.381553 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9aae4936-0f51-48a4-8298-7583d486c6ee-secret-volume\") pod \"collect-profiles-29322240-s2nk4\" (UID: \"9aae4936-0f51-48a4-8298-7583d486c6ee\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322240-s2nk4" Oct 01 16:00:00 crc kubenswrapper[4869]: I1001 16:00:00.403918 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qmftk\" (UniqueName: \"kubernetes.io/projected/9aae4936-0f51-48a4-8298-7583d486c6ee-kube-api-access-qmftk\") pod \"collect-profiles-29322240-s2nk4\" (UID: \"9aae4936-0f51-48a4-8298-7583d486c6ee\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322240-s2nk4" Oct 01 16:00:00 crc kubenswrapper[4869]: I1001 16:00:00.485010 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29322240-s2nk4" Oct 01 16:00:13 crc kubenswrapper[4869]: I1001 16:00:13.354096 4869 patch_prober.go:28] interesting pod/machine-config-daemon-c86m8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 16:00:13 crc kubenswrapper[4869]: I1001 16:00:13.354954 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 16:00:13 crc kubenswrapper[4869]: I1001 16:00:13.355028 4869 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" Oct 01 16:00:13 crc kubenswrapper[4869]: I1001 16:00:13.356252 4869 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"73a9cb1c6d1e9e84d375990bcfc5854c30e29631055280b1b2dc0f98545eaf97"} pod="openshift-machine-config-operator/machine-config-daemon-c86m8" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 01 16:00:13 crc kubenswrapper[4869]: I1001 16:00:13.356399 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" containerID="cri-o://73a9cb1c6d1e9e84d375990bcfc5854c30e29631055280b1b2dc0f98545eaf97" gracePeriod=600 Oct 01 16:00:13 crc kubenswrapper[4869]: I1001 16:00:13.828753 4869 generic.go:334] "Generic (PLEG): container finished" podID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerID="73a9cb1c6d1e9e84d375990bcfc5854c30e29631055280b1b2dc0f98545eaf97" exitCode=0 Oct 01 16:00:13 crc kubenswrapper[4869]: I1001 16:00:13.828815 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" event={"ID":"a4b64f7f-0b03-4f47-965b-9fde048b735c","Type":"ContainerDied","Data":"73a9cb1c6d1e9e84d375990bcfc5854c30e29631055280b1b2dc0f98545eaf97"} Oct 01 16:00:13 crc kubenswrapper[4869]: I1001 16:00:13.828904 4869 scope.go:117] "RemoveContainer" containerID="ea87da209cc47118cc41bdb3107264d5dd2e07bc832e620553f0ac1be9456693" Oct 01 16:00:31 crc kubenswrapper[4869]: E1001 16:00:31.841490 4869 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified" Oct 01 16:00:31 crc kubenswrapper[4869]: E1001 16:00:31.842371 4869 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:tempest-tests-tempest-tests-runner,Image:quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:test-operator-ephemeral-workdir,ReadOnly:false,MountPath:/var/lib/tempest,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-ephemeral-temporary,ReadOnly:false,MountPath:/tmp,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:false,MountPath:/etc/test_operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-logs,ReadOnly:false,MountPath:/var/lib/tempest/external_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/etc/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/var/lib/tempest/.config/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config-secret,ReadOnly:false,MountPath:/etc/openstack/secure.yaml,SubPath:secure.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ca-certs,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ssh-key,ReadOnly:false,MountPath:/var/lib/tempest/id_ecdsa,SubPath:ssh_key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ceph,ReadOnly:true,MountPath:/etc/ceph,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-pmlps,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42480,RunAsNonRoot:*false,ReadOnlyRootFilesystem:*false,AllowPrivilegeEscalation:*true,RunAsGroup:*42480,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-custom-data-s0,},Optional:nil,},SecretRef:nil,},EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-env-vars-s0,},Optional:nil,},SecretRef:nil,},},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod tempest-tests-tempest-s00-full_openstack(78543bf0-aa4b-45dc-a7c6-37a22a5be6ea): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 01 16:00:31 crc kubenswrapper[4869]: E1001 16:00:31.843758 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ErrImagePull: \"rpc error: code = 
Canceled desc = copying config: context canceled\"" pod="openstack/tempest-tests-tempest-s00-full" podUID="78543bf0-aa4b-45dc-a7c6-37a22a5be6ea" Oct 01 16:00:32 crc kubenswrapper[4869]: E1001 16:00:32.076343 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified\\\"\"" pod="openstack/tempest-tests-tempest-s00-full" podUID="78543bf0-aa4b-45dc-a7c6-37a22a5be6ea" Oct 01 16:00:32 crc kubenswrapper[4869]: I1001 16:00:32.257479 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29322240-s2nk4"] Oct 01 16:00:32 crc kubenswrapper[4869]: W1001 16:00:32.258739 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9aae4936_0f51_48a4_8298_7583d486c6ee.slice/crio-d38517fc15208c7d2ef0b5bd5959a93ca2f75dcdffbca25d69e5904efeb6d127 WatchSource:0}: Error finding container d38517fc15208c7d2ef0b5bd5959a93ca2f75dcdffbca25d69e5904efeb6d127: Status 404 returned error can't find the container with id d38517fc15208c7d2ef0b5bd5959a93ca2f75dcdffbca25d69e5904efeb6d127 Oct 01 16:00:33 crc kubenswrapper[4869]: I1001 16:00:33.087738 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" event={"ID":"a4b64f7f-0b03-4f47-965b-9fde048b735c","Type":"ContainerStarted","Data":"79c9485f04215c7c3b6f12e2efc6b087d5369f6ae2f17fb6c51f154c13b2407a"} Oct 01 16:00:33 crc kubenswrapper[4869]: I1001 16:00:33.092531 4869 generic.go:334] "Generic (PLEG): container finished" podID="9aae4936-0f51-48a4-8298-7583d486c6ee" containerID="cc1a2821d018f6fee189dff271dc1ed21ec61a9f8f3804fd2d83b5f1f246145f" exitCode=0 Oct 01 16:00:33 crc kubenswrapper[4869]: I1001 16:00:33.092597 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29322240-s2nk4" event={"ID":"9aae4936-0f51-48a4-8298-7583d486c6ee","Type":"ContainerDied","Data":"cc1a2821d018f6fee189dff271dc1ed21ec61a9f8f3804fd2d83b5f1f246145f"} Oct 01 16:00:33 crc kubenswrapper[4869]: I1001 16:00:33.092879 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29322240-s2nk4" event={"ID":"9aae4936-0f51-48a4-8298-7583d486c6ee","Type":"ContainerStarted","Data":"d38517fc15208c7d2ef0b5bd5959a93ca2f75dcdffbca25d69e5904efeb6d127"} Oct 01 16:00:34 crc kubenswrapper[4869]: I1001 16:00:34.481721 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29322240-s2nk4" Oct 01 16:00:34 crc kubenswrapper[4869]: I1001 16:00:34.570716 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qmftk\" (UniqueName: \"kubernetes.io/projected/9aae4936-0f51-48a4-8298-7583d486c6ee-kube-api-access-qmftk\") pod \"9aae4936-0f51-48a4-8298-7583d486c6ee\" (UID: \"9aae4936-0f51-48a4-8298-7583d486c6ee\") " Oct 01 16:00:34 crc kubenswrapper[4869]: I1001 16:00:34.571329 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9aae4936-0f51-48a4-8298-7583d486c6ee-config-volume\") pod \"9aae4936-0f51-48a4-8298-7583d486c6ee\" (UID: \"9aae4936-0f51-48a4-8298-7583d486c6ee\") " Oct 01 16:00:34 crc kubenswrapper[4869]: I1001 16:00:34.571732 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9aae4936-0f51-48a4-8298-7583d486c6ee-secret-volume\") pod \"9aae4936-0f51-48a4-8298-7583d486c6ee\" (UID: \"9aae4936-0f51-48a4-8298-7583d486c6ee\") " Oct 01 16:00:34 crc kubenswrapper[4869]: I1001 16:00:34.572227 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9aae4936-0f51-48a4-8298-7583d486c6ee-config-volume" (OuterVolumeSpecName: "config-volume") pod "9aae4936-0f51-48a4-8298-7583d486c6ee" (UID: "9aae4936-0f51-48a4-8298-7583d486c6ee"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 16:00:34 crc kubenswrapper[4869]: I1001 16:00:34.577694 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9aae4936-0f51-48a4-8298-7583d486c6ee-kube-api-access-qmftk" (OuterVolumeSpecName: "kube-api-access-qmftk") pod "9aae4936-0f51-48a4-8298-7583d486c6ee" (UID: "9aae4936-0f51-48a4-8298-7583d486c6ee"). InnerVolumeSpecName "kube-api-access-qmftk". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 16:00:34 crc kubenswrapper[4869]: I1001 16:00:34.578461 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9aae4936-0f51-48a4-8298-7583d486c6ee-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "9aae4936-0f51-48a4-8298-7583d486c6ee" (UID: "9aae4936-0f51-48a4-8298-7583d486c6ee"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 16:00:34 crc kubenswrapper[4869]: I1001 16:00:34.674145 4869 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9aae4936-0f51-48a4-8298-7583d486c6ee-config-volume\") on node \"crc\" DevicePath \"\"" Oct 01 16:00:34 crc kubenswrapper[4869]: I1001 16:00:34.674188 4869 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9aae4936-0f51-48a4-8298-7583d486c6ee-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 01 16:00:34 crc kubenswrapper[4869]: I1001 16:00:34.674201 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qmftk\" (UniqueName: \"kubernetes.io/projected/9aae4936-0f51-48a4-8298-7583d486c6ee-kube-api-access-qmftk\") on node \"crc\" DevicePath \"\"" Oct 01 16:00:35 crc kubenswrapper[4869]: I1001 16:00:35.114386 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29322240-s2nk4" event={"ID":"9aae4936-0f51-48a4-8298-7583d486c6ee","Type":"ContainerDied","Data":"d38517fc15208c7d2ef0b5bd5959a93ca2f75dcdffbca25d69e5904efeb6d127"} Oct 01 16:00:35 crc kubenswrapper[4869]: I1001 16:00:35.114443 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d38517fc15208c7d2ef0b5bd5959a93ca2f75dcdffbca25d69e5904efeb6d127" Oct 01 16:00:35 crc kubenswrapper[4869]: I1001 16:00:35.114443 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29322240-s2nk4" Oct 01 16:00:35 crc kubenswrapper[4869]: I1001 16:00:35.569300 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29322195-8k696"] Oct 01 16:00:35 crc kubenswrapper[4869]: I1001 16:00:35.578688 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29322195-8k696"] Oct 01 16:00:35 crc kubenswrapper[4869]: I1001 16:00:35.598058 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7b44ff70-e201-4423-bf46-ec7066786edc" path="/var/lib/kubelet/pods/7b44ff70-e201-4423-bf46-ec7066786edc/volumes" Oct 01 16:00:47 crc kubenswrapper[4869]: I1001 16:00:47.882554 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0" Oct 01 16:00:49 crc kubenswrapper[4869]: I1001 16:00:49.268519 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest-s00-full" event={"ID":"78543bf0-aa4b-45dc-a7c6-37a22a5be6ea","Type":"ContainerStarted","Data":"13f0c91a6a4b872edb42923b131243ff49ea806efc1b2cdc8e35bb8a45f85322"} Oct 01 16:00:49 crc kubenswrapper[4869]: I1001 16:00:49.292119 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/tempest-tests-tempest-s00-full" podStartSLOduration=3.708342891 podStartE2EDuration="1m1.292094615s" podCreationTimestamp="2025-10-01 15:59:48 +0000 UTC" firstStartedPulling="2025-10-01 15:59:50.29265733 +0000 UTC m=+3299.439500456" lastFinishedPulling="2025-10-01 16:00:47.876409034 +0000 UTC m=+3357.023252180" observedRunningTime="2025-10-01 16:00:49.288144676 +0000 UTC m=+3358.434987792" watchObservedRunningTime="2025-10-01 16:00:49.292094615 +0000 UTC m=+3358.438937771" Oct 01 16:01:00 crc kubenswrapper[4869]: I1001 16:01:00.156374 4869 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack/keystone-cron-29322241-7648v"] Oct 01 16:01:00 crc kubenswrapper[4869]: E1001 16:01:00.157254 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9aae4936-0f51-48a4-8298-7583d486c6ee" containerName="collect-profiles" Oct 01 16:01:00 crc kubenswrapper[4869]: I1001 16:01:00.157290 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="9aae4936-0f51-48a4-8298-7583d486c6ee" containerName="collect-profiles" Oct 01 16:01:00 crc kubenswrapper[4869]: I1001 16:01:00.157534 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="9aae4936-0f51-48a4-8298-7583d486c6ee" containerName="collect-profiles" Oct 01 16:01:00 crc kubenswrapper[4869]: I1001 16:01:00.158355 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29322241-7648v" Oct 01 16:01:00 crc kubenswrapper[4869]: I1001 16:01:00.169032 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29322241-7648v"] Oct 01 16:01:00 crc kubenswrapper[4869]: I1001 16:01:00.276357 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d67f52c3-ce6c-486a-8e11-91d25894472f-fernet-keys\") pod \"keystone-cron-29322241-7648v\" (UID: \"d67f52c3-ce6c-486a-8e11-91d25894472f\") " pod="openstack/keystone-cron-29322241-7648v" Oct 01 16:01:00 crc kubenswrapper[4869]: I1001 16:01:00.276538 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-px8bn\" (UniqueName: \"kubernetes.io/projected/d67f52c3-ce6c-486a-8e11-91d25894472f-kube-api-access-px8bn\") pod \"keystone-cron-29322241-7648v\" (UID: \"d67f52c3-ce6c-486a-8e11-91d25894472f\") " pod="openstack/keystone-cron-29322241-7648v" Oct 01 16:01:00 crc kubenswrapper[4869]: I1001 16:01:00.276715 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d67f52c3-ce6c-486a-8e11-91d25894472f-config-data\") pod \"keystone-cron-29322241-7648v\" (UID: \"d67f52c3-ce6c-486a-8e11-91d25894472f\") " pod="openstack/keystone-cron-29322241-7648v" Oct 01 16:01:00 crc kubenswrapper[4869]: I1001 16:01:00.276829 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d67f52c3-ce6c-486a-8e11-91d25894472f-combined-ca-bundle\") pod \"keystone-cron-29322241-7648v\" (UID: \"d67f52c3-ce6c-486a-8e11-91d25894472f\") " pod="openstack/keystone-cron-29322241-7648v" Oct 01 16:01:00 crc kubenswrapper[4869]: I1001 16:01:00.379285 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d67f52c3-ce6c-486a-8e11-91d25894472f-config-data\") pod \"keystone-cron-29322241-7648v\" (UID: \"d67f52c3-ce6c-486a-8e11-91d25894472f\") " pod="openstack/keystone-cron-29322241-7648v" Oct 01 16:01:00 crc kubenswrapper[4869]: I1001 16:01:00.379368 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d67f52c3-ce6c-486a-8e11-91d25894472f-combined-ca-bundle\") pod \"keystone-cron-29322241-7648v\" (UID: \"d67f52c3-ce6c-486a-8e11-91d25894472f\") " pod="openstack/keystone-cron-29322241-7648v" Oct 01 16:01:00 crc kubenswrapper[4869]: I1001 16:01:00.379644 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d67f52c3-ce6c-486a-8e11-91d25894472f-fernet-keys\") pod \"keystone-cron-29322241-7648v\" (UID: \"d67f52c3-ce6c-486a-8e11-91d25894472f\") " pod="openstack/keystone-cron-29322241-7648v" Oct 01 16:01:00 crc kubenswrapper[4869]: I1001 16:01:00.380562 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-px8bn\" (UniqueName: \"kubernetes.io/projected/d67f52c3-ce6c-486a-8e11-91d25894472f-kube-api-access-px8bn\") pod \"keystone-cron-29322241-7648v\" (UID: \"d67f52c3-ce6c-486a-8e11-91d25894472f\") " pod="openstack/keystone-cron-29322241-7648v" Oct 01 16:01:00 crc kubenswrapper[4869]: I1001 16:01:00.387115 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d67f52c3-ce6c-486a-8e11-91d25894472f-config-data\") pod \"keystone-cron-29322241-7648v\" (UID: \"d67f52c3-ce6c-486a-8e11-91d25894472f\") " pod="openstack/keystone-cron-29322241-7648v" Oct 01 16:01:00 crc kubenswrapper[4869]: I1001 16:01:00.387289 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d67f52c3-ce6c-486a-8e11-91d25894472f-combined-ca-bundle\") pod \"keystone-cron-29322241-7648v\" (UID: \"d67f52c3-ce6c-486a-8e11-91d25894472f\") " pod="openstack/keystone-cron-29322241-7648v" Oct 01 16:01:00 crc kubenswrapper[4869]: I1001 16:01:00.392066 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d67f52c3-ce6c-486a-8e11-91d25894472f-fernet-keys\") pod \"keystone-cron-29322241-7648v\" (UID: \"d67f52c3-ce6c-486a-8e11-91d25894472f\") " pod="openstack/keystone-cron-29322241-7648v" Oct 01 16:01:00 crc kubenswrapper[4869]: I1001 16:01:00.406704 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-px8bn\" (UniqueName: \"kubernetes.io/projected/d67f52c3-ce6c-486a-8e11-91d25894472f-kube-api-access-px8bn\") pod \"keystone-cron-29322241-7648v\" (UID: \"d67f52c3-ce6c-486a-8e11-91d25894472f\") " pod="openstack/keystone-cron-29322241-7648v" Oct 01 16:01:00 crc kubenswrapper[4869]: I1001 16:01:00.486284 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29322241-7648v" Oct 01 16:01:00 crc kubenswrapper[4869]: I1001 16:01:00.781132 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29322241-7648v"] Oct 01 16:01:01 crc kubenswrapper[4869]: I1001 16:01:01.399233 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29322241-7648v" event={"ID":"d67f52c3-ce6c-486a-8e11-91d25894472f","Type":"ContainerStarted","Data":"70c4b45cfdbd5ddaba4f949d1aa10d83f013183dfa3c50879787e1820d331266"} Oct 01 16:01:01 crc kubenswrapper[4869]: I1001 16:01:01.399584 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29322241-7648v" event={"ID":"d67f52c3-ce6c-486a-8e11-91d25894472f","Type":"ContainerStarted","Data":"6971068f86dbc8f005aa5aac89afc5cc709471a1827db0ee2a398fbc954e227b"} Oct 01 16:01:01 crc kubenswrapper[4869]: I1001 16:01:01.436681 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-cron-29322241-7648v" podStartSLOduration=1.436650277 podStartE2EDuration="1.436650277s" podCreationTimestamp="2025-10-01 16:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 16:01:01.426408019 +0000 UTC m=+3370.573251155" watchObservedRunningTime="2025-10-01 16:01:01.436650277 +0000 UTC m=+3370.583493423" Oct 01 16:01:03 crc kubenswrapper[4869]: I1001 16:01:03.425516 4869 generic.go:334] "Generic (PLEG): container finished" podID="d67f52c3-ce6c-486a-8e11-91d25894472f" containerID="70c4b45cfdbd5ddaba4f949d1aa10d83f013183dfa3c50879787e1820d331266" exitCode=0 Oct 01 16:01:03 crc kubenswrapper[4869]: I1001 16:01:03.425601 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29322241-7648v" event={"ID":"d67f52c3-ce6c-486a-8e11-91d25894472f","Type":"ContainerDied","Data":"70c4b45cfdbd5ddaba4f949d1aa10d83f013183dfa3c50879787e1820d331266"} Oct 01 16:01:04 crc kubenswrapper[4869]: I1001 16:01:04.814662 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29322241-7648v" Oct 01 16:01:04 crc kubenswrapper[4869]: I1001 16:01:04.892449 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-px8bn\" (UniqueName: \"kubernetes.io/projected/d67f52c3-ce6c-486a-8e11-91d25894472f-kube-api-access-px8bn\") pod \"d67f52c3-ce6c-486a-8e11-91d25894472f\" (UID: \"d67f52c3-ce6c-486a-8e11-91d25894472f\") " Oct 01 16:01:04 crc kubenswrapper[4869]: I1001 16:01:04.892608 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d67f52c3-ce6c-486a-8e11-91d25894472f-config-data\") pod \"d67f52c3-ce6c-486a-8e11-91d25894472f\" (UID: \"d67f52c3-ce6c-486a-8e11-91d25894472f\") " Oct 01 16:01:04 crc kubenswrapper[4869]: I1001 16:01:04.892667 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d67f52c3-ce6c-486a-8e11-91d25894472f-fernet-keys\") pod \"d67f52c3-ce6c-486a-8e11-91d25894472f\" (UID: \"d67f52c3-ce6c-486a-8e11-91d25894472f\") " Oct 01 16:01:04 crc kubenswrapper[4869]: I1001 16:01:04.892723 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d67f52c3-ce6c-486a-8e11-91d25894472f-combined-ca-bundle\") pod \"d67f52c3-ce6c-486a-8e11-91d25894472f\" (UID: \"d67f52c3-ce6c-486a-8e11-91d25894472f\") " Oct 01 16:01:04 crc kubenswrapper[4869]: I1001 16:01:04.900335 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d67f52c3-ce6c-486a-8e11-91d25894472f-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "d67f52c3-ce6c-486a-8e11-91d25894472f" (UID: "d67f52c3-ce6c-486a-8e11-91d25894472f"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 16:01:04 crc kubenswrapper[4869]: I1001 16:01:04.902559 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d67f52c3-ce6c-486a-8e11-91d25894472f-kube-api-access-px8bn" (OuterVolumeSpecName: "kube-api-access-px8bn") pod "d67f52c3-ce6c-486a-8e11-91d25894472f" (UID: "d67f52c3-ce6c-486a-8e11-91d25894472f"). InnerVolumeSpecName "kube-api-access-px8bn". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 16:01:04 crc kubenswrapper[4869]: I1001 16:01:04.950991 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d67f52c3-ce6c-486a-8e11-91d25894472f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d67f52c3-ce6c-486a-8e11-91d25894472f" (UID: "d67f52c3-ce6c-486a-8e11-91d25894472f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 16:01:04 crc kubenswrapper[4869]: I1001 16:01:04.985958 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d67f52c3-ce6c-486a-8e11-91d25894472f-config-data" (OuterVolumeSpecName: "config-data") pod "d67f52c3-ce6c-486a-8e11-91d25894472f" (UID: "d67f52c3-ce6c-486a-8e11-91d25894472f"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 16:01:04 crc kubenswrapper[4869]: I1001 16:01:04.996891 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-px8bn\" (UniqueName: \"kubernetes.io/projected/d67f52c3-ce6c-486a-8e11-91d25894472f-kube-api-access-px8bn\") on node \"crc\" DevicePath \"\"" Oct 01 16:01:04 crc kubenswrapper[4869]: I1001 16:01:04.997977 4869 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d67f52c3-ce6c-486a-8e11-91d25894472f-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 16:01:04 crc kubenswrapper[4869]: I1001 16:01:04.997999 4869 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d67f52c3-ce6c-486a-8e11-91d25894472f-fernet-keys\") on node \"crc\" DevicePath \"\"" Oct 01 16:01:04 crc kubenswrapper[4869]: I1001 16:01:04.998019 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d67f52c3-ce6c-486a-8e11-91d25894472f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 16:01:05 crc kubenswrapper[4869]: I1001 16:01:05.450692 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29322241-7648v" event={"ID":"d67f52c3-ce6c-486a-8e11-91d25894472f","Type":"ContainerDied","Data":"6971068f86dbc8f005aa5aac89afc5cc709471a1827db0ee2a398fbc954e227b"} Oct 01 16:01:05 crc kubenswrapper[4869]: I1001 16:01:05.450953 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6971068f86dbc8f005aa5aac89afc5cc709471a1827db0ee2a398fbc954e227b" Oct 01 16:01:05 crc kubenswrapper[4869]: I1001 16:01:05.450778 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29322241-7648v" Oct 01 16:01:15 crc kubenswrapper[4869]: I1001 16:01:15.861343 4869 scope.go:117] "RemoveContainer" containerID="09144d984105f0751c9baf4d4ac22b51fcf226e93d477c26c4e3cc76614119d8" Oct 01 16:02:15 crc kubenswrapper[4869]: I1001 16:02:15.952747 4869 scope.go:117] "RemoveContainer" containerID="f111438f80d0f5055529c6244880ac9ee8a04edffa8c2d0c1641a89a1b5e6d53" Oct 01 16:02:16 crc kubenswrapper[4869]: I1001 16:02:16.052505 4869 scope.go:117] "RemoveContainer" containerID="605a92c850e67049b917baa78be2cf05b4219bf2115ce553d543d6d7172ff3c3" Oct 01 16:02:43 crc kubenswrapper[4869]: I1001 16:02:43.354822 4869 patch_prober.go:28] interesting pod/machine-config-daemon-c86m8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 16:02:43 crc kubenswrapper[4869]: I1001 16:02:43.355371 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 16:03:13 crc kubenswrapper[4869]: I1001 16:03:13.354614 4869 patch_prober.go:28] interesting pod/machine-config-daemon-c86m8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 16:03:13 crc kubenswrapper[4869]: I1001 16:03:13.355332 4869 
prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 16:03:43 crc kubenswrapper[4869]: I1001 16:03:43.354126 4869 patch_prober.go:28] interesting pod/machine-config-daemon-c86m8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 16:03:43 crc kubenswrapper[4869]: I1001 16:03:43.354909 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 16:03:43 crc kubenswrapper[4869]: I1001 16:03:43.354960 4869 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" Oct 01 16:03:43 crc kubenswrapper[4869]: I1001 16:03:43.355862 4869 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"79c9485f04215c7c3b6f12e2efc6b087d5369f6ae2f17fb6c51f154c13b2407a"} pod="openshift-machine-config-operator/machine-config-daemon-c86m8" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 01 16:03:43 crc kubenswrapper[4869]: I1001 16:03:43.355929 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" containerID="cri-o://79c9485f04215c7c3b6f12e2efc6b087d5369f6ae2f17fb6c51f154c13b2407a" gracePeriod=600 Oct 01 16:03:43 crc kubenswrapper[4869]: E1001 16:03:43.488049 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:03:44 crc kubenswrapper[4869]: I1001 16:03:44.137214 4869 generic.go:334] "Generic (PLEG): container finished" podID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerID="79c9485f04215c7c3b6f12e2efc6b087d5369f6ae2f17fb6c51f154c13b2407a" exitCode=0 Oct 01 16:03:44 crc kubenswrapper[4869]: I1001 16:03:44.137279 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" event={"ID":"a4b64f7f-0b03-4f47-965b-9fde048b735c","Type":"ContainerDied","Data":"79c9485f04215c7c3b6f12e2efc6b087d5369f6ae2f17fb6c51f154c13b2407a"} Oct 01 16:03:44 crc kubenswrapper[4869]: I1001 16:03:44.137319 4869 scope.go:117] "RemoveContainer" containerID="73a9cb1c6d1e9e84d375990bcfc5854c30e29631055280b1b2dc0f98545eaf97" Oct 01 16:03:44 crc kubenswrapper[4869]: I1001 16:03:44.138251 4869 scope.go:117] "RemoveContainer" containerID="79c9485f04215c7c3b6f12e2efc6b087d5369f6ae2f17fb6c51f154c13b2407a" Oct 01 
16:03:44 crc kubenswrapper[4869]: E1001 16:03:44.138685 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:03:58 crc kubenswrapper[4869]: I1001 16:03:58.581906 4869 scope.go:117] "RemoveContainer" containerID="79c9485f04215c7c3b6f12e2efc6b087d5369f6ae2f17fb6c51f154c13b2407a" Oct 01 16:03:58 crc kubenswrapper[4869]: E1001 16:03:58.583087 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:04:10 crc kubenswrapper[4869]: I1001 16:04:10.582406 4869 scope.go:117] "RemoveContainer" containerID="79c9485f04215c7c3b6f12e2efc6b087d5369f6ae2f17fb6c51f154c13b2407a" Oct 01 16:04:10 crc kubenswrapper[4869]: E1001 16:04:10.582960 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:04:25 crc kubenswrapper[4869]: I1001 16:04:25.582005 4869 scope.go:117] "RemoveContainer" containerID="79c9485f04215c7c3b6f12e2efc6b087d5369f6ae2f17fb6c51f154c13b2407a" Oct 01 16:04:25 crc kubenswrapper[4869]: E1001 16:04:25.584795 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:04:36 crc kubenswrapper[4869]: I1001 16:04:36.581201 4869 scope.go:117] "RemoveContainer" containerID="79c9485f04215c7c3b6f12e2efc6b087d5369f6ae2f17fb6c51f154c13b2407a" Oct 01 16:04:36 crc kubenswrapper[4869]: E1001 16:04:36.582182 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:04:44 crc kubenswrapper[4869]: I1001 16:04:44.047113 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-db-create-4vqnt"] Oct 01 16:04:44 crc kubenswrapper[4869]: I1001 16:04:44.057896 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/manila-db-create-4vqnt"] Oct 01 16:04:45 crc kubenswrapper[4869]: I1001 16:04:45.603696 
4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c44eaba4-c497-45e1-9f35-bb8b579b70fd" path="/var/lib/kubelet/pods/c44eaba4-c497-45e1-9f35-bb8b579b70fd/volumes" Oct 01 16:04:50 crc kubenswrapper[4869]: I1001 16:04:50.580899 4869 scope.go:117] "RemoveContainer" containerID="79c9485f04215c7c3b6f12e2efc6b087d5369f6ae2f17fb6c51f154c13b2407a" Oct 01 16:04:50 crc kubenswrapper[4869]: E1001 16:04:50.582001 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:04:54 crc kubenswrapper[4869]: I1001 16:04:54.051420 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-fc6c-account-create-vdtbr"] Oct 01 16:04:54 crc kubenswrapper[4869]: I1001 16:04:54.060245 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/manila-fc6c-account-create-vdtbr"] Oct 01 16:04:55 crc kubenswrapper[4869]: I1001 16:04:55.599818 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="73c04986-4ca1-4de0-b432-6d29a0f0b8f9" path="/var/lib/kubelet/pods/73c04986-4ca1-4de0-b432-6d29a0f0b8f9/volumes" Oct 01 16:04:59 crc kubenswrapper[4869]: I1001 16:04:59.888403 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-x95v7"] Oct 01 16:04:59 crc kubenswrapper[4869]: E1001 16:04:59.889590 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d67f52c3-ce6c-486a-8e11-91d25894472f" containerName="keystone-cron" Oct 01 16:04:59 crc kubenswrapper[4869]: I1001 16:04:59.889613 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="d67f52c3-ce6c-486a-8e11-91d25894472f" containerName="keystone-cron" Oct 01 16:04:59 crc kubenswrapper[4869]: I1001 16:04:59.889988 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="d67f52c3-ce6c-486a-8e11-91d25894472f" containerName="keystone-cron" Oct 01 16:04:59 crc kubenswrapper[4869]: I1001 16:04:59.894089 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-x95v7" Oct 01 16:04:59 crc kubenswrapper[4869]: I1001 16:04:59.911786 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-x95v7"] Oct 01 16:05:00 crc kubenswrapper[4869]: I1001 16:05:00.004909 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5tgqr\" (UniqueName: \"kubernetes.io/projected/62336757-3803-4630-868f-404f26706cf0-kube-api-access-5tgqr\") pod \"redhat-operators-x95v7\" (UID: \"62336757-3803-4630-868f-404f26706cf0\") " pod="openshift-marketplace/redhat-operators-x95v7" Oct 01 16:05:00 crc kubenswrapper[4869]: I1001 16:05:00.005223 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/62336757-3803-4630-868f-404f26706cf0-utilities\") pod \"redhat-operators-x95v7\" (UID: \"62336757-3803-4630-868f-404f26706cf0\") " pod="openshift-marketplace/redhat-operators-x95v7" Oct 01 16:05:00 crc kubenswrapper[4869]: I1001 16:05:00.005295 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/62336757-3803-4630-868f-404f26706cf0-catalog-content\") pod \"redhat-operators-x95v7\" (UID: \"62336757-3803-4630-868f-404f26706cf0\") " pod="openshift-marketplace/redhat-operators-x95v7" Oct 01 16:05:00 crc kubenswrapper[4869]: I1001 16:05:00.107838 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/62336757-3803-4630-868f-404f26706cf0-utilities\") pod \"redhat-operators-x95v7\" (UID: \"62336757-3803-4630-868f-404f26706cf0\") " pod="openshift-marketplace/redhat-operators-x95v7" Oct 01 16:05:00 crc kubenswrapper[4869]: I1001 16:05:00.107893 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/62336757-3803-4630-868f-404f26706cf0-catalog-content\") pod \"redhat-operators-x95v7\" (UID: \"62336757-3803-4630-868f-404f26706cf0\") " pod="openshift-marketplace/redhat-operators-x95v7" Oct 01 16:05:00 crc kubenswrapper[4869]: I1001 16:05:00.108043 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5tgqr\" (UniqueName: \"kubernetes.io/projected/62336757-3803-4630-868f-404f26706cf0-kube-api-access-5tgqr\") pod \"redhat-operators-x95v7\" (UID: \"62336757-3803-4630-868f-404f26706cf0\") " pod="openshift-marketplace/redhat-operators-x95v7" Oct 01 16:05:00 crc kubenswrapper[4869]: I1001 16:05:00.108736 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/62336757-3803-4630-868f-404f26706cf0-utilities\") pod \"redhat-operators-x95v7\" (UID: \"62336757-3803-4630-868f-404f26706cf0\") " pod="openshift-marketplace/redhat-operators-x95v7" Oct 01 16:05:00 crc kubenswrapper[4869]: I1001 16:05:00.108759 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/62336757-3803-4630-868f-404f26706cf0-catalog-content\") pod \"redhat-operators-x95v7\" (UID: \"62336757-3803-4630-868f-404f26706cf0\") " pod="openshift-marketplace/redhat-operators-x95v7" Oct 01 16:05:00 crc kubenswrapper[4869]: I1001 16:05:00.133816 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-5tgqr\" (UniqueName: \"kubernetes.io/projected/62336757-3803-4630-868f-404f26706cf0-kube-api-access-5tgqr\") pod \"redhat-operators-x95v7\" (UID: \"62336757-3803-4630-868f-404f26706cf0\") " pod="openshift-marketplace/redhat-operators-x95v7" Oct 01 16:05:00 crc kubenswrapper[4869]: I1001 16:05:00.228864 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-x95v7" Oct 01 16:05:00 crc kubenswrapper[4869]: I1001 16:05:00.691329 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-x95v7"] Oct 01 16:05:00 crc kubenswrapper[4869]: I1001 16:05:00.910660 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-x95v7" event={"ID":"62336757-3803-4630-868f-404f26706cf0","Type":"ContainerStarted","Data":"5a63d69a5faef636c3dabee29fe16c7ac58e1871f863cf93048314338ddd23ec"} Oct 01 16:05:00 crc kubenswrapper[4869]: I1001 16:05:00.910701 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-x95v7" event={"ID":"62336757-3803-4630-868f-404f26706cf0","Type":"ContainerStarted","Data":"f5213079d8150e7244e9fa92711cdf0f6cd0c311e1e30949f2eb48b87c483f09"} Oct 01 16:05:01 crc kubenswrapper[4869]: I1001 16:05:01.924559 4869 generic.go:334] "Generic (PLEG): container finished" podID="62336757-3803-4630-868f-404f26706cf0" containerID="5a63d69a5faef636c3dabee29fe16c7ac58e1871f863cf93048314338ddd23ec" exitCode=0 Oct 01 16:05:01 crc kubenswrapper[4869]: I1001 16:05:01.924627 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-x95v7" event={"ID":"62336757-3803-4630-868f-404f26706cf0","Type":"ContainerDied","Data":"5a63d69a5faef636c3dabee29fe16c7ac58e1871f863cf93048314338ddd23ec"} Oct 01 16:05:01 crc kubenswrapper[4869]: I1001 16:05:01.927792 4869 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 01 16:05:02 crc kubenswrapper[4869]: I1001 16:05:02.936862 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-x95v7" event={"ID":"62336757-3803-4630-868f-404f26706cf0","Type":"ContainerStarted","Data":"0c24e65e27b666e0ab6ad7c3e2bbd8a8920dd5e6e33a8434a8c0ec66471e06fa"} Oct 01 16:05:03 crc kubenswrapper[4869]: I1001 16:05:03.948784 4869 generic.go:334] "Generic (PLEG): container finished" podID="62336757-3803-4630-868f-404f26706cf0" containerID="0c24e65e27b666e0ab6ad7c3e2bbd8a8920dd5e6e33a8434a8c0ec66471e06fa" exitCode=0 Oct 01 16:05:03 crc kubenswrapper[4869]: I1001 16:05:03.948893 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-x95v7" event={"ID":"62336757-3803-4630-868f-404f26706cf0","Type":"ContainerDied","Data":"0c24e65e27b666e0ab6ad7c3e2bbd8a8920dd5e6e33a8434a8c0ec66471e06fa"} Oct 01 16:05:04 crc kubenswrapper[4869]: I1001 16:05:04.986396 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-x95v7" event={"ID":"62336757-3803-4630-868f-404f26706cf0","Type":"ContainerStarted","Data":"25d24d3545dbfdf2b6335943cfdaa1f35fdfbb691110f87e81563aa6f8c66a88"} Oct 01 16:05:05 crc kubenswrapper[4869]: I1001 16:05:05.026481 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-x95v7" podStartSLOduration=3.414181993 podStartE2EDuration="6.026454666s" podCreationTimestamp="2025-10-01 16:04:59 +0000 UTC" firstStartedPulling="2025-10-01 
16:05:01.927517614 +0000 UTC m=+3611.074360730" lastFinishedPulling="2025-10-01 16:05:04.539790247 +0000 UTC m=+3613.686633403" observedRunningTime="2025-10-01 16:05:05.0250292 +0000 UTC m=+3614.171872336" watchObservedRunningTime="2025-10-01 16:05:05.026454666 +0000 UTC m=+3614.173297792" Oct 01 16:05:05 crc kubenswrapper[4869]: I1001 16:05:05.581671 4869 scope.go:117] "RemoveContainer" containerID="79c9485f04215c7c3b6f12e2efc6b087d5369f6ae2f17fb6c51f154c13b2407a" Oct 01 16:05:05 crc kubenswrapper[4869]: E1001 16:05:05.582308 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:05:10 crc kubenswrapper[4869]: I1001 16:05:10.229455 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-x95v7" Oct 01 16:05:10 crc kubenswrapper[4869]: I1001 16:05:10.231056 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-x95v7" Oct 01 16:05:10 crc kubenswrapper[4869]: I1001 16:05:10.283432 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-x95v7" Oct 01 16:05:11 crc kubenswrapper[4869]: I1001 16:05:11.109187 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-x95v7" Oct 01 16:05:11 crc kubenswrapper[4869]: I1001 16:05:11.162556 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-x95v7"] Oct 01 16:05:13 crc kubenswrapper[4869]: I1001 16:05:13.063658 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-x95v7" podUID="62336757-3803-4630-868f-404f26706cf0" containerName="registry-server" containerID="cri-o://25d24d3545dbfdf2b6335943cfdaa1f35fdfbb691110f87e81563aa6f8c66a88" gracePeriod=2 Oct 01 16:05:13 crc kubenswrapper[4869]: I1001 16:05:13.535731 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-x95v7" Oct 01 16:05:13 crc kubenswrapper[4869]: I1001 16:05:13.578510 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/62336757-3803-4630-868f-404f26706cf0-catalog-content\") pod \"62336757-3803-4630-868f-404f26706cf0\" (UID: \"62336757-3803-4630-868f-404f26706cf0\") " Oct 01 16:05:13 crc kubenswrapper[4869]: I1001 16:05:13.578586 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5tgqr\" (UniqueName: \"kubernetes.io/projected/62336757-3803-4630-868f-404f26706cf0-kube-api-access-5tgqr\") pod \"62336757-3803-4630-868f-404f26706cf0\" (UID: \"62336757-3803-4630-868f-404f26706cf0\") " Oct 01 16:05:13 crc kubenswrapper[4869]: I1001 16:05:13.578679 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/62336757-3803-4630-868f-404f26706cf0-utilities\") pod \"62336757-3803-4630-868f-404f26706cf0\" (UID: \"62336757-3803-4630-868f-404f26706cf0\") " Oct 01 16:05:13 crc kubenswrapper[4869]: I1001 16:05:13.579398 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/62336757-3803-4630-868f-404f26706cf0-utilities" (OuterVolumeSpecName: "utilities") pod "62336757-3803-4630-868f-404f26706cf0" (UID: "62336757-3803-4630-868f-404f26706cf0"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 16:05:13 crc kubenswrapper[4869]: I1001 16:05:13.584404 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/62336757-3803-4630-868f-404f26706cf0-kube-api-access-5tgqr" (OuterVolumeSpecName: "kube-api-access-5tgqr") pod "62336757-3803-4630-868f-404f26706cf0" (UID: "62336757-3803-4630-868f-404f26706cf0"). InnerVolumeSpecName "kube-api-access-5tgqr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 16:05:13 crc kubenswrapper[4869]: I1001 16:05:13.585020 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5tgqr\" (UniqueName: \"kubernetes.io/projected/62336757-3803-4630-868f-404f26706cf0-kube-api-access-5tgqr\") on node \"crc\" DevicePath \"\"" Oct 01 16:05:13 crc kubenswrapper[4869]: I1001 16:05:13.585096 4869 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/62336757-3803-4630-868f-404f26706cf0-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 16:05:13 crc kubenswrapper[4869]: I1001 16:05:13.663485 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/62336757-3803-4630-868f-404f26706cf0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "62336757-3803-4630-868f-404f26706cf0" (UID: "62336757-3803-4630-868f-404f26706cf0"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 16:05:13 crc kubenswrapper[4869]: I1001 16:05:13.688754 4869 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/62336757-3803-4630-868f-404f26706cf0-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 16:05:14 crc kubenswrapper[4869]: I1001 16:05:14.080723 4869 generic.go:334] "Generic (PLEG): container finished" podID="62336757-3803-4630-868f-404f26706cf0" containerID="25d24d3545dbfdf2b6335943cfdaa1f35fdfbb691110f87e81563aa6f8c66a88" exitCode=0 Oct 01 16:05:14 crc kubenswrapper[4869]: I1001 16:05:14.080802 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-x95v7" event={"ID":"62336757-3803-4630-868f-404f26706cf0","Type":"ContainerDied","Data":"25d24d3545dbfdf2b6335943cfdaa1f35fdfbb691110f87e81563aa6f8c66a88"} Oct 01 16:05:14 crc kubenswrapper[4869]: I1001 16:05:14.080860 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-x95v7" Oct 01 16:05:14 crc kubenswrapper[4869]: I1001 16:05:14.081427 4869 scope.go:117] "RemoveContainer" containerID="25d24d3545dbfdf2b6335943cfdaa1f35fdfbb691110f87e81563aa6f8c66a88" Oct 01 16:05:14 crc kubenswrapper[4869]: I1001 16:05:14.081407 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-x95v7" event={"ID":"62336757-3803-4630-868f-404f26706cf0","Type":"ContainerDied","Data":"f5213079d8150e7244e9fa92711cdf0f6cd0c311e1e30949f2eb48b87c483f09"} Oct 01 16:05:14 crc kubenswrapper[4869]: I1001 16:05:14.104400 4869 scope.go:117] "RemoveContainer" containerID="0c24e65e27b666e0ab6ad7c3e2bbd8a8920dd5e6e33a8434a8c0ec66471e06fa" Oct 01 16:05:14 crc kubenswrapper[4869]: I1001 16:05:14.124300 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-x95v7"] Oct 01 16:05:14 crc kubenswrapper[4869]: I1001 16:05:14.138314 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-x95v7"] Oct 01 16:05:14 crc kubenswrapper[4869]: I1001 16:05:14.154648 4869 scope.go:117] "RemoveContainer" containerID="5a63d69a5faef636c3dabee29fe16c7ac58e1871f863cf93048314338ddd23ec" Oct 01 16:05:14 crc kubenswrapper[4869]: I1001 16:05:14.208109 4869 scope.go:117] "RemoveContainer" containerID="25d24d3545dbfdf2b6335943cfdaa1f35fdfbb691110f87e81563aa6f8c66a88" Oct 01 16:05:14 crc kubenswrapper[4869]: E1001 16:05:14.208856 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"25d24d3545dbfdf2b6335943cfdaa1f35fdfbb691110f87e81563aa6f8c66a88\": container with ID starting with 25d24d3545dbfdf2b6335943cfdaa1f35fdfbb691110f87e81563aa6f8c66a88 not found: ID does not exist" containerID="25d24d3545dbfdf2b6335943cfdaa1f35fdfbb691110f87e81563aa6f8c66a88" Oct 01 16:05:14 crc kubenswrapper[4869]: I1001 16:05:14.208990 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"25d24d3545dbfdf2b6335943cfdaa1f35fdfbb691110f87e81563aa6f8c66a88"} err="failed to get container status \"25d24d3545dbfdf2b6335943cfdaa1f35fdfbb691110f87e81563aa6f8c66a88\": rpc error: code = NotFound desc = could not find container \"25d24d3545dbfdf2b6335943cfdaa1f35fdfbb691110f87e81563aa6f8c66a88\": container with ID starting with 25d24d3545dbfdf2b6335943cfdaa1f35fdfbb691110f87e81563aa6f8c66a88 not found: ID does not exist" Oct 01 16:05:14 crc 
kubenswrapper[4869]: I1001 16:05:14.209102 4869 scope.go:117] "RemoveContainer" containerID="0c24e65e27b666e0ab6ad7c3e2bbd8a8920dd5e6e33a8434a8c0ec66471e06fa" Oct 01 16:05:14 crc kubenswrapper[4869]: E1001 16:05:14.209615 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0c24e65e27b666e0ab6ad7c3e2bbd8a8920dd5e6e33a8434a8c0ec66471e06fa\": container with ID starting with 0c24e65e27b666e0ab6ad7c3e2bbd8a8920dd5e6e33a8434a8c0ec66471e06fa not found: ID does not exist" containerID="0c24e65e27b666e0ab6ad7c3e2bbd8a8920dd5e6e33a8434a8c0ec66471e06fa" Oct 01 16:05:14 crc kubenswrapper[4869]: I1001 16:05:14.209649 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0c24e65e27b666e0ab6ad7c3e2bbd8a8920dd5e6e33a8434a8c0ec66471e06fa"} err="failed to get container status \"0c24e65e27b666e0ab6ad7c3e2bbd8a8920dd5e6e33a8434a8c0ec66471e06fa\": rpc error: code = NotFound desc = could not find container \"0c24e65e27b666e0ab6ad7c3e2bbd8a8920dd5e6e33a8434a8c0ec66471e06fa\": container with ID starting with 0c24e65e27b666e0ab6ad7c3e2bbd8a8920dd5e6e33a8434a8c0ec66471e06fa not found: ID does not exist" Oct 01 16:05:14 crc kubenswrapper[4869]: I1001 16:05:14.209668 4869 scope.go:117] "RemoveContainer" containerID="5a63d69a5faef636c3dabee29fe16c7ac58e1871f863cf93048314338ddd23ec" Oct 01 16:05:14 crc kubenswrapper[4869]: E1001 16:05:14.209948 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5a63d69a5faef636c3dabee29fe16c7ac58e1871f863cf93048314338ddd23ec\": container with ID starting with 5a63d69a5faef636c3dabee29fe16c7ac58e1871f863cf93048314338ddd23ec not found: ID does not exist" containerID="5a63d69a5faef636c3dabee29fe16c7ac58e1871f863cf93048314338ddd23ec" Oct 01 16:05:14 crc kubenswrapper[4869]: I1001 16:05:14.209999 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5a63d69a5faef636c3dabee29fe16c7ac58e1871f863cf93048314338ddd23ec"} err="failed to get container status \"5a63d69a5faef636c3dabee29fe16c7ac58e1871f863cf93048314338ddd23ec\": rpc error: code = NotFound desc = could not find container \"5a63d69a5faef636c3dabee29fe16c7ac58e1871f863cf93048314338ddd23ec\": container with ID starting with 5a63d69a5faef636c3dabee29fe16c7ac58e1871f863cf93048314338ddd23ec not found: ID does not exist" Oct 01 16:05:15 crc kubenswrapper[4869]: I1001 16:05:15.593020 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="62336757-3803-4630-868f-404f26706cf0" path="/var/lib/kubelet/pods/62336757-3803-4630-868f-404f26706cf0/volumes" Oct 01 16:05:16 crc kubenswrapper[4869]: I1001 16:05:16.222561 4869 scope.go:117] "RemoveContainer" containerID="6db8090beb2d0a726b31cc9798a1bf965d66a1d0357cdb88f12938f9e0f0f3d2" Oct 01 16:05:16 crc kubenswrapper[4869]: I1001 16:05:16.244104 4869 scope.go:117] "RemoveContainer" containerID="2194ffacdd19e2540036abb8d7fe48806c491d8172647164f1d9fc99b49a8c53" Oct 01 16:05:17 crc kubenswrapper[4869]: I1001 16:05:17.582352 4869 scope.go:117] "RemoveContainer" containerID="79c9485f04215c7c3b6f12e2efc6b087d5369f6ae2f17fb6c51f154c13b2407a" Oct 01 16:05:17 crc kubenswrapper[4869]: E1001 16:05:17.582910 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:05:23 crc kubenswrapper[4869]: I1001 16:05:23.059403 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-db-sync-mlfvt"] Oct 01 16:05:23 crc kubenswrapper[4869]: I1001 16:05:23.069146 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/manila-db-sync-mlfvt"] Oct 01 16:05:23 crc kubenswrapper[4869]: I1001 16:05:23.596595 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ffc8a9b9-ff6f-4d41-b65e-9b57aebf660e" path="/var/lib/kubelet/pods/ffc8a9b9-ff6f-4d41-b65e-9b57aebf660e/volumes" Oct 01 16:05:30 crc kubenswrapper[4869]: I1001 16:05:30.582703 4869 scope.go:117] "RemoveContainer" containerID="79c9485f04215c7c3b6f12e2efc6b087d5369f6ae2f17fb6c51f154c13b2407a" Oct 01 16:05:30 crc kubenswrapper[4869]: E1001 16:05:30.583600 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:05:37 crc kubenswrapper[4869]: I1001 16:05:37.309427 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Oct 01 16:05:39 crc kubenswrapper[4869]: I1001 16:05:39.557823 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-qp74x"] Oct 01 16:05:39 crc kubenswrapper[4869]: E1001 16:05:39.559440 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="62336757-3803-4630-868f-404f26706cf0" containerName="registry-server" Oct 01 16:05:39 crc kubenswrapper[4869]: I1001 16:05:39.559523 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="62336757-3803-4630-868f-404f26706cf0" containerName="registry-server" Oct 01 16:05:39 crc kubenswrapper[4869]: E1001 16:05:39.559595 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="62336757-3803-4630-868f-404f26706cf0" containerName="extract-content" Oct 01 16:05:39 crc kubenswrapper[4869]: I1001 16:05:39.559612 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="62336757-3803-4630-868f-404f26706cf0" containerName="extract-content" Oct 01 16:05:39 crc kubenswrapper[4869]: E1001 16:05:39.559657 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="62336757-3803-4630-868f-404f26706cf0" containerName="extract-utilities" Oct 01 16:05:39 crc kubenswrapper[4869]: I1001 16:05:39.559674 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="62336757-3803-4630-868f-404f26706cf0" containerName="extract-utilities" Oct 01 16:05:39 crc kubenswrapper[4869]: I1001 16:05:39.560343 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="62336757-3803-4630-868f-404f26706cf0" containerName="registry-server" Oct 01 16:05:39 crc kubenswrapper[4869]: I1001 16:05:39.563623 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-qp74x" Oct 01 16:05:39 crc kubenswrapper[4869]: I1001 16:05:39.567622 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-qp74x"] Oct 01 16:05:39 crc kubenswrapper[4869]: I1001 16:05:39.733764 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bc1a79e5-a111-48db-9d8c-4b4237b21107-utilities\") pod \"certified-operators-qp74x\" (UID: \"bc1a79e5-a111-48db-9d8c-4b4237b21107\") " pod="openshift-marketplace/certified-operators-qp74x" Oct 01 16:05:39 crc kubenswrapper[4869]: I1001 16:05:39.734063 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4t47d\" (UniqueName: \"kubernetes.io/projected/bc1a79e5-a111-48db-9d8c-4b4237b21107-kube-api-access-4t47d\") pod \"certified-operators-qp74x\" (UID: \"bc1a79e5-a111-48db-9d8c-4b4237b21107\") " pod="openshift-marketplace/certified-operators-qp74x" Oct 01 16:05:39 crc kubenswrapper[4869]: I1001 16:05:39.734197 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bc1a79e5-a111-48db-9d8c-4b4237b21107-catalog-content\") pod \"certified-operators-qp74x\" (UID: \"bc1a79e5-a111-48db-9d8c-4b4237b21107\") " pod="openshift-marketplace/certified-operators-qp74x" Oct 01 16:05:39 crc kubenswrapper[4869]: I1001 16:05:39.835624 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4t47d\" (UniqueName: \"kubernetes.io/projected/bc1a79e5-a111-48db-9d8c-4b4237b21107-kube-api-access-4t47d\") pod \"certified-operators-qp74x\" (UID: \"bc1a79e5-a111-48db-9d8c-4b4237b21107\") " pod="openshift-marketplace/certified-operators-qp74x" Oct 01 16:05:39 crc kubenswrapper[4869]: I1001 16:05:39.835743 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bc1a79e5-a111-48db-9d8c-4b4237b21107-catalog-content\") pod \"certified-operators-qp74x\" (UID: \"bc1a79e5-a111-48db-9d8c-4b4237b21107\") " pod="openshift-marketplace/certified-operators-qp74x" Oct 01 16:05:39 crc kubenswrapper[4869]: I1001 16:05:39.835768 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bc1a79e5-a111-48db-9d8c-4b4237b21107-utilities\") pod \"certified-operators-qp74x\" (UID: \"bc1a79e5-a111-48db-9d8c-4b4237b21107\") " pod="openshift-marketplace/certified-operators-qp74x" Oct 01 16:05:39 crc kubenswrapper[4869]: I1001 16:05:39.836247 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bc1a79e5-a111-48db-9d8c-4b4237b21107-utilities\") pod \"certified-operators-qp74x\" (UID: \"bc1a79e5-a111-48db-9d8c-4b4237b21107\") " pod="openshift-marketplace/certified-operators-qp74x" Oct 01 16:05:39 crc kubenswrapper[4869]: I1001 16:05:39.836742 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bc1a79e5-a111-48db-9d8c-4b4237b21107-catalog-content\") pod \"certified-operators-qp74x\" (UID: \"bc1a79e5-a111-48db-9d8c-4b4237b21107\") " pod="openshift-marketplace/certified-operators-qp74x" Oct 01 16:05:39 crc kubenswrapper[4869]: I1001 16:05:39.858094 4869 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-4t47d\" (UniqueName: \"kubernetes.io/projected/bc1a79e5-a111-48db-9d8c-4b4237b21107-kube-api-access-4t47d\") pod \"certified-operators-qp74x\" (UID: \"bc1a79e5-a111-48db-9d8c-4b4237b21107\") " pod="openshift-marketplace/certified-operators-qp74x" Oct 01 16:05:39 crc kubenswrapper[4869]: I1001 16:05:39.893487 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-qp74x" Oct 01 16:05:40 crc kubenswrapper[4869]: I1001 16:05:40.424140 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-qp74x"] Oct 01 16:05:40 crc kubenswrapper[4869]: W1001 16:05:40.425489 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbc1a79e5_a111_48db_9d8c_4b4237b21107.slice/crio-fbba0267015fda31224fb6068a7cb8d77926ee9b91ef769f6fc7a49ec7d0521b WatchSource:0}: Error finding container fbba0267015fda31224fb6068a7cb8d77926ee9b91ef769f6fc7a49ec7d0521b: Status 404 returned error can't find the container with id fbba0267015fda31224fb6068a7cb8d77926ee9b91ef769f6fc7a49ec7d0521b Oct 01 16:05:41 crc kubenswrapper[4869]: I1001 16:05:41.382933 4869 generic.go:334] "Generic (PLEG): container finished" podID="bc1a79e5-a111-48db-9d8c-4b4237b21107" containerID="79f8be28e76d2ec708f1beea38d8d6bc6fe92a04ef6e462f95789d49045dae18" exitCode=0 Oct 01 16:05:41 crc kubenswrapper[4869]: I1001 16:05:41.383026 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qp74x" event={"ID":"bc1a79e5-a111-48db-9d8c-4b4237b21107","Type":"ContainerDied","Data":"79f8be28e76d2ec708f1beea38d8d6bc6fe92a04ef6e462f95789d49045dae18"} Oct 01 16:05:41 crc kubenswrapper[4869]: I1001 16:05:41.383447 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qp74x" event={"ID":"bc1a79e5-a111-48db-9d8c-4b4237b21107","Type":"ContainerStarted","Data":"fbba0267015fda31224fb6068a7cb8d77926ee9b91ef769f6fc7a49ec7d0521b"} Oct 01 16:05:41 crc kubenswrapper[4869]: I1001 16:05:41.588949 4869 scope.go:117] "RemoveContainer" containerID="79c9485f04215c7c3b6f12e2efc6b087d5369f6ae2f17fb6c51f154c13b2407a" Oct 01 16:05:41 crc kubenswrapper[4869]: E1001 16:05:41.589193 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:05:43 crc kubenswrapper[4869]: I1001 16:05:43.402787 4869 generic.go:334] "Generic (PLEG): container finished" podID="bc1a79e5-a111-48db-9d8c-4b4237b21107" containerID="3ded6fe6c410e3d227fa68f6d2ba9cc7479239b6c61eb68ce4df180959ddaf3f" exitCode=0 Oct 01 16:05:43 crc kubenswrapper[4869]: I1001 16:05:43.402927 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qp74x" event={"ID":"bc1a79e5-a111-48db-9d8c-4b4237b21107","Type":"ContainerDied","Data":"3ded6fe6c410e3d227fa68f6d2ba9cc7479239b6c61eb68ce4df180959ddaf3f"} Oct 01 16:05:44 crc kubenswrapper[4869]: I1001 16:05:44.341389 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-mk4rh"] Oct 01 16:05:44 crc 
kubenswrapper[4869]: I1001 16:05:44.343990 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mk4rh" Oct 01 16:05:44 crc kubenswrapper[4869]: I1001 16:05:44.354021 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-mk4rh"] Oct 01 16:05:44 crc kubenswrapper[4869]: I1001 16:05:44.439149 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/810ac7c2-fe05-4ca0-917e-da3f0427e918-catalog-content\") pod \"redhat-marketplace-mk4rh\" (UID: \"810ac7c2-fe05-4ca0-917e-da3f0427e918\") " pod="openshift-marketplace/redhat-marketplace-mk4rh" Oct 01 16:05:44 crc kubenswrapper[4869]: I1001 16:05:44.439206 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/810ac7c2-fe05-4ca0-917e-da3f0427e918-utilities\") pod \"redhat-marketplace-mk4rh\" (UID: \"810ac7c2-fe05-4ca0-917e-da3f0427e918\") " pod="openshift-marketplace/redhat-marketplace-mk4rh" Oct 01 16:05:44 crc kubenswrapper[4869]: I1001 16:05:44.439233 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8v8hb\" (UniqueName: \"kubernetes.io/projected/810ac7c2-fe05-4ca0-917e-da3f0427e918-kube-api-access-8v8hb\") pod \"redhat-marketplace-mk4rh\" (UID: \"810ac7c2-fe05-4ca0-917e-da3f0427e918\") " pod="openshift-marketplace/redhat-marketplace-mk4rh" Oct 01 16:05:44 crc kubenswrapper[4869]: I1001 16:05:44.439469 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qp74x" event={"ID":"bc1a79e5-a111-48db-9d8c-4b4237b21107","Type":"ContainerStarted","Data":"8ff60453e03b13ccfa66f965929e4099950cc1b736bb2dbcaf943015bc3b933f"} Oct 01 16:05:44 crc kubenswrapper[4869]: I1001 16:05:44.460044 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-qp74x" podStartSLOduration=3.00694551 podStartE2EDuration="5.460021423s" podCreationTimestamp="2025-10-01 16:05:39 +0000 UTC" firstStartedPulling="2025-10-01 16:05:41.385829404 +0000 UTC m=+3650.532672540" lastFinishedPulling="2025-10-01 16:05:43.838905327 +0000 UTC m=+3652.985748453" observedRunningTime="2025-10-01 16:05:44.454682459 +0000 UTC m=+3653.601525595" watchObservedRunningTime="2025-10-01 16:05:44.460021423 +0000 UTC m=+3653.606864539" Oct 01 16:05:44 crc kubenswrapper[4869]: I1001 16:05:44.540904 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/810ac7c2-fe05-4ca0-917e-da3f0427e918-catalog-content\") pod \"redhat-marketplace-mk4rh\" (UID: \"810ac7c2-fe05-4ca0-917e-da3f0427e918\") " pod="openshift-marketplace/redhat-marketplace-mk4rh" Oct 01 16:05:44 crc kubenswrapper[4869]: I1001 16:05:44.540971 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/810ac7c2-fe05-4ca0-917e-da3f0427e918-utilities\") pod \"redhat-marketplace-mk4rh\" (UID: \"810ac7c2-fe05-4ca0-917e-da3f0427e918\") " pod="openshift-marketplace/redhat-marketplace-mk4rh" Oct 01 16:05:44 crc kubenswrapper[4869]: I1001 16:05:44.541010 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8v8hb\" (UniqueName: 
\"kubernetes.io/projected/810ac7c2-fe05-4ca0-917e-da3f0427e918-kube-api-access-8v8hb\") pod \"redhat-marketplace-mk4rh\" (UID: \"810ac7c2-fe05-4ca0-917e-da3f0427e918\") " pod="openshift-marketplace/redhat-marketplace-mk4rh" Oct 01 16:05:44 crc kubenswrapper[4869]: I1001 16:05:44.541477 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/810ac7c2-fe05-4ca0-917e-da3f0427e918-catalog-content\") pod \"redhat-marketplace-mk4rh\" (UID: \"810ac7c2-fe05-4ca0-917e-da3f0427e918\") " pod="openshift-marketplace/redhat-marketplace-mk4rh" Oct 01 16:05:44 crc kubenswrapper[4869]: I1001 16:05:44.541488 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/810ac7c2-fe05-4ca0-917e-da3f0427e918-utilities\") pod \"redhat-marketplace-mk4rh\" (UID: \"810ac7c2-fe05-4ca0-917e-da3f0427e918\") " pod="openshift-marketplace/redhat-marketplace-mk4rh" Oct 01 16:05:44 crc kubenswrapper[4869]: I1001 16:05:44.563951 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8v8hb\" (UniqueName: \"kubernetes.io/projected/810ac7c2-fe05-4ca0-917e-da3f0427e918-kube-api-access-8v8hb\") pod \"redhat-marketplace-mk4rh\" (UID: \"810ac7c2-fe05-4ca0-917e-da3f0427e918\") " pod="openshift-marketplace/redhat-marketplace-mk4rh" Oct 01 16:05:44 crc kubenswrapper[4869]: I1001 16:05:44.666734 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mk4rh" Oct 01 16:05:45 crc kubenswrapper[4869]: I1001 16:05:45.156995 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-mk4rh"] Oct 01 16:05:45 crc kubenswrapper[4869]: W1001 16:05:45.159825 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod810ac7c2_fe05_4ca0_917e_da3f0427e918.slice/crio-e5393e2f2bd6c36c6c0cf1edc718a989145d700eb2c460bf24c42f91263a332d WatchSource:0}: Error finding container e5393e2f2bd6c36c6c0cf1edc718a989145d700eb2c460bf24c42f91263a332d: Status 404 returned error can't find the container with id e5393e2f2bd6c36c6c0cf1edc718a989145d700eb2c460bf24c42f91263a332d Oct 01 16:05:45 crc kubenswrapper[4869]: I1001 16:05:45.449369 4869 generic.go:334] "Generic (PLEG): container finished" podID="810ac7c2-fe05-4ca0-917e-da3f0427e918" containerID="197f6c03bc94d7bd4ea7e337daf9f0e2c3ceaa3711837c9f9dd52d0d03a5e720" exitCode=0 Oct 01 16:05:45 crc kubenswrapper[4869]: I1001 16:05:45.449427 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mk4rh" event={"ID":"810ac7c2-fe05-4ca0-917e-da3f0427e918","Type":"ContainerDied","Data":"197f6c03bc94d7bd4ea7e337daf9f0e2c3ceaa3711837c9f9dd52d0d03a5e720"} Oct 01 16:05:45 crc kubenswrapper[4869]: I1001 16:05:45.449496 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mk4rh" event={"ID":"810ac7c2-fe05-4ca0-917e-da3f0427e918","Type":"ContainerStarted","Data":"e5393e2f2bd6c36c6c0cf1edc718a989145d700eb2c460bf24c42f91263a332d"} Oct 01 16:05:46 crc kubenswrapper[4869]: I1001 16:05:46.461277 4869 generic.go:334] "Generic (PLEG): container finished" podID="810ac7c2-fe05-4ca0-917e-da3f0427e918" containerID="b0eae9c4d0ff8dbee2eb02e089663b9052888440fda00d7841d12f57d9f0adec" exitCode=0 Oct 01 16:05:46 crc kubenswrapper[4869]: I1001 16:05:46.461416 4869 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="openshift-marketplace/redhat-marketplace-mk4rh" event={"ID":"810ac7c2-fe05-4ca0-917e-da3f0427e918","Type":"ContainerDied","Data":"b0eae9c4d0ff8dbee2eb02e089663b9052888440fda00d7841d12f57d9f0adec"} Oct 01 16:05:47 crc kubenswrapper[4869]: I1001 16:05:47.473206 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mk4rh" event={"ID":"810ac7c2-fe05-4ca0-917e-da3f0427e918","Type":"ContainerStarted","Data":"092f4d144e7327c49428d99c14e65fa3c69667031843a46beaed1c84257f9030"} Oct 01 16:05:47 crc kubenswrapper[4869]: I1001 16:05:47.499467 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-mk4rh" podStartSLOduration=2.046086775 podStartE2EDuration="3.499447536s" podCreationTimestamp="2025-10-01 16:05:44 +0000 UTC" firstStartedPulling="2025-10-01 16:05:45.451085508 +0000 UTC m=+3654.597928624" lastFinishedPulling="2025-10-01 16:05:46.904446269 +0000 UTC m=+3656.051289385" observedRunningTime="2025-10-01 16:05:47.491519546 +0000 UTC m=+3656.638362662" watchObservedRunningTime="2025-10-01 16:05:47.499447536 +0000 UTC m=+3656.646290652" Oct 01 16:05:49 crc kubenswrapper[4869]: I1001 16:05:49.893762 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-qp74x" Oct 01 16:05:49 crc kubenswrapper[4869]: I1001 16:05:49.894155 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-qp74x" Oct 01 16:05:49 crc kubenswrapper[4869]: I1001 16:05:49.948366 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-qp74x" Oct 01 16:05:50 crc kubenswrapper[4869]: I1001 16:05:50.560097 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-qp74x" Oct 01 16:05:51 crc kubenswrapper[4869]: I1001 16:05:51.934039 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-qp74x"] Oct 01 16:05:52 crc kubenswrapper[4869]: I1001 16:05:52.523581 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-qp74x" podUID="bc1a79e5-a111-48db-9d8c-4b4237b21107" containerName="registry-server" containerID="cri-o://8ff60453e03b13ccfa66f965929e4099950cc1b736bb2dbcaf943015bc3b933f" gracePeriod=2 Oct 01 16:05:53 crc kubenswrapper[4869]: I1001 16:05:53.141221 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-qp74x" Oct 01 16:05:53 crc kubenswrapper[4869]: I1001 16:05:53.245659 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bc1a79e5-a111-48db-9d8c-4b4237b21107-catalog-content\") pod \"bc1a79e5-a111-48db-9d8c-4b4237b21107\" (UID: \"bc1a79e5-a111-48db-9d8c-4b4237b21107\") " Oct 01 16:05:53 crc kubenswrapper[4869]: I1001 16:05:53.245935 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bc1a79e5-a111-48db-9d8c-4b4237b21107-utilities\") pod \"bc1a79e5-a111-48db-9d8c-4b4237b21107\" (UID: \"bc1a79e5-a111-48db-9d8c-4b4237b21107\") " Oct 01 16:05:53 crc kubenswrapper[4869]: I1001 16:05:53.245987 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4t47d\" (UniqueName: \"kubernetes.io/projected/bc1a79e5-a111-48db-9d8c-4b4237b21107-kube-api-access-4t47d\") pod \"bc1a79e5-a111-48db-9d8c-4b4237b21107\" (UID: \"bc1a79e5-a111-48db-9d8c-4b4237b21107\") " Oct 01 16:05:53 crc kubenswrapper[4869]: I1001 16:05:53.246766 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc1a79e5-a111-48db-9d8c-4b4237b21107-utilities" (OuterVolumeSpecName: "utilities") pod "bc1a79e5-a111-48db-9d8c-4b4237b21107" (UID: "bc1a79e5-a111-48db-9d8c-4b4237b21107"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 16:05:53 crc kubenswrapper[4869]: I1001 16:05:53.255565 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc1a79e5-a111-48db-9d8c-4b4237b21107-kube-api-access-4t47d" (OuterVolumeSpecName: "kube-api-access-4t47d") pod "bc1a79e5-a111-48db-9d8c-4b4237b21107" (UID: "bc1a79e5-a111-48db-9d8c-4b4237b21107"). InnerVolumeSpecName "kube-api-access-4t47d". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 16:05:53 crc kubenswrapper[4869]: I1001 16:05:53.283197 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc1a79e5-a111-48db-9d8c-4b4237b21107-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "bc1a79e5-a111-48db-9d8c-4b4237b21107" (UID: "bc1a79e5-a111-48db-9d8c-4b4237b21107"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 16:05:53 crc kubenswrapper[4869]: I1001 16:05:53.347996 4869 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bc1a79e5-a111-48db-9d8c-4b4237b21107-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 16:05:53 crc kubenswrapper[4869]: I1001 16:05:53.348026 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4t47d\" (UniqueName: \"kubernetes.io/projected/bc1a79e5-a111-48db-9d8c-4b4237b21107-kube-api-access-4t47d\") on node \"crc\" DevicePath \"\"" Oct 01 16:05:53 crc kubenswrapper[4869]: I1001 16:05:53.348035 4869 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bc1a79e5-a111-48db-9d8c-4b4237b21107-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 16:05:53 crc kubenswrapper[4869]: I1001 16:05:53.533332 4869 generic.go:334] "Generic (PLEG): container finished" podID="bc1a79e5-a111-48db-9d8c-4b4237b21107" containerID="8ff60453e03b13ccfa66f965929e4099950cc1b736bb2dbcaf943015bc3b933f" exitCode=0 Oct 01 16:05:53 crc kubenswrapper[4869]: I1001 16:05:53.533368 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qp74x" event={"ID":"bc1a79e5-a111-48db-9d8c-4b4237b21107","Type":"ContainerDied","Data":"8ff60453e03b13ccfa66f965929e4099950cc1b736bb2dbcaf943015bc3b933f"} Oct 01 16:05:53 crc kubenswrapper[4869]: I1001 16:05:53.533394 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-qp74x" Oct 01 16:05:53 crc kubenswrapper[4869]: I1001 16:05:53.533417 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qp74x" event={"ID":"bc1a79e5-a111-48db-9d8c-4b4237b21107","Type":"ContainerDied","Data":"fbba0267015fda31224fb6068a7cb8d77926ee9b91ef769f6fc7a49ec7d0521b"} Oct 01 16:05:53 crc kubenswrapper[4869]: I1001 16:05:53.533450 4869 scope.go:117] "RemoveContainer" containerID="8ff60453e03b13ccfa66f965929e4099950cc1b736bb2dbcaf943015bc3b933f" Oct 01 16:05:53 crc kubenswrapper[4869]: I1001 16:05:53.569029 4869 scope.go:117] "RemoveContainer" containerID="3ded6fe6c410e3d227fa68f6d2ba9cc7479239b6c61eb68ce4df180959ddaf3f" Oct 01 16:05:53 crc kubenswrapper[4869]: I1001 16:05:53.608474 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-qp74x"] Oct 01 16:05:53 crc kubenswrapper[4869]: I1001 16:05:53.609007 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-qp74x"] Oct 01 16:05:53 crc kubenswrapper[4869]: I1001 16:05:53.610586 4869 scope.go:117] "RemoveContainer" containerID="79f8be28e76d2ec708f1beea38d8d6bc6fe92a04ef6e462f95789d49045dae18" Oct 01 16:05:53 crc kubenswrapper[4869]: I1001 16:05:53.657138 4869 scope.go:117] "RemoveContainer" containerID="8ff60453e03b13ccfa66f965929e4099950cc1b736bb2dbcaf943015bc3b933f" Oct 01 16:05:53 crc kubenswrapper[4869]: E1001 16:05:53.657588 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8ff60453e03b13ccfa66f965929e4099950cc1b736bb2dbcaf943015bc3b933f\": container with ID starting with 8ff60453e03b13ccfa66f965929e4099950cc1b736bb2dbcaf943015bc3b933f not found: ID does not exist" containerID="8ff60453e03b13ccfa66f965929e4099950cc1b736bb2dbcaf943015bc3b933f" Oct 01 16:05:53 crc kubenswrapper[4869]: I1001 16:05:53.657636 
4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8ff60453e03b13ccfa66f965929e4099950cc1b736bb2dbcaf943015bc3b933f"} err="failed to get container status \"8ff60453e03b13ccfa66f965929e4099950cc1b736bb2dbcaf943015bc3b933f\": rpc error: code = NotFound desc = could not find container \"8ff60453e03b13ccfa66f965929e4099950cc1b736bb2dbcaf943015bc3b933f\": container with ID starting with 8ff60453e03b13ccfa66f965929e4099950cc1b736bb2dbcaf943015bc3b933f not found: ID does not exist" Oct 01 16:05:53 crc kubenswrapper[4869]: I1001 16:05:53.657670 4869 scope.go:117] "RemoveContainer" containerID="3ded6fe6c410e3d227fa68f6d2ba9cc7479239b6c61eb68ce4df180959ddaf3f" Oct 01 16:05:53 crc kubenswrapper[4869]: E1001 16:05:53.658001 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3ded6fe6c410e3d227fa68f6d2ba9cc7479239b6c61eb68ce4df180959ddaf3f\": container with ID starting with 3ded6fe6c410e3d227fa68f6d2ba9cc7479239b6c61eb68ce4df180959ddaf3f not found: ID does not exist" containerID="3ded6fe6c410e3d227fa68f6d2ba9cc7479239b6c61eb68ce4df180959ddaf3f" Oct 01 16:05:53 crc kubenswrapper[4869]: I1001 16:05:53.658048 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3ded6fe6c410e3d227fa68f6d2ba9cc7479239b6c61eb68ce4df180959ddaf3f"} err="failed to get container status \"3ded6fe6c410e3d227fa68f6d2ba9cc7479239b6c61eb68ce4df180959ddaf3f\": rpc error: code = NotFound desc = could not find container \"3ded6fe6c410e3d227fa68f6d2ba9cc7479239b6c61eb68ce4df180959ddaf3f\": container with ID starting with 3ded6fe6c410e3d227fa68f6d2ba9cc7479239b6c61eb68ce4df180959ddaf3f not found: ID does not exist" Oct 01 16:05:53 crc kubenswrapper[4869]: I1001 16:05:53.658077 4869 scope.go:117] "RemoveContainer" containerID="79f8be28e76d2ec708f1beea38d8d6bc6fe92a04ef6e462f95789d49045dae18" Oct 01 16:05:53 crc kubenswrapper[4869]: E1001 16:05:53.658409 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"79f8be28e76d2ec708f1beea38d8d6bc6fe92a04ef6e462f95789d49045dae18\": container with ID starting with 79f8be28e76d2ec708f1beea38d8d6bc6fe92a04ef6e462f95789d49045dae18 not found: ID does not exist" containerID="79f8be28e76d2ec708f1beea38d8d6bc6fe92a04ef6e462f95789d49045dae18" Oct 01 16:05:53 crc kubenswrapper[4869]: I1001 16:05:53.658447 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"79f8be28e76d2ec708f1beea38d8d6bc6fe92a04ef6e462f95789d49045dae18"} err="failed to get container status \"79f8be28e76d2ec708f1beea38d8d6bc6fe92a04ef6e462f95789d49045dae18\": rpc error: code = NotFound desc = could not find container \"79f8be28e76d2ec708f1beea38d8d6bc6fe92a04ef6e462f95789d49045dae18\": container with ID starting with 79f8be28e76d2ec708f1beea38d8d6bc6fe92a04ef6e462f95789d49045dae18 not found: ID does not exist" Oct 01 16:05:54 crc kubenswrapper[4869]: I1001 16:05:54.667895 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-mk4rh" Oct 01 16:05:54 crc kubenswrapper[4869]: I1001 16:05:54.668174 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-mk4rh" Oct 01 16:05:54 crc kubenswrapper[4869]: I1001 16:05:54.722697 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openshift-marketplace/redhat-marketplace-mk4rh" Oct 01 16:05:55 crc kubenswrapper[4869]: I1001 16:05:55.581339 4869 scope.go:117] "RemoveContainer" containerID="79c9485f04215c7c3b6f12e2efc6b087d5369f6ae2f17fb6c51f154c13b2407a" Oct 01 16:05:55 crc kubenswrapper[4869]: E1001 16:05:55.581884 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:05:55 crc kubenswrapper[4869]: I1001 16:05:55.598997 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc1a79e5-a111-48db-9d8c-4b4237b21107" path="/var/lib/kubelet/pods/bc1a79e5-a111-48db-9d8c-4b4237b21107/volumes" Oct 01 16:05:55 crc kubenswrapper[4869]: I1001 16:05:55.615010 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-mk4rh" Oct 01 16:05:56 crc kubenswrapper[4869]: I1001 16:05:56.539248 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-mk4rh"] Oct 01 16:05:57 crc kubenswrapper[4869]: I1001 16:05:57.567005 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-mk4rh" podUID="810ac7c2-fe05-4ca0-917e-da3f0427e918" containerName="registry-server" containerID="cri-o://092f4d144e7327c49428d99c14e65fa3c69667031843a46beaed1c84257f9030" gracePeriod=2 Oct 01 16:05:58 crc kubenswrapper[4869]: I1001 16:05:58.285743 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mk4rh" Oct 01 16:05:58 crc kubenswrapper[4869]: I1001 16:05:58.452917 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/810ac7c2-fe05-4ca0-917e-da3f0427e918-catalog-content\") pod \"810ac7c2-fe05-4ca0-917e-da3f0427e918\" (UID: \"810ac7c2-fe05-4ca0-917e-da3f0427e918\") " Oct 01 16:05:58 crc kubenswrapper[4869]: I1001 16:05:58.453186 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/810ac7c2-fe05-4ca0-917e-da3f0427e918-utilities\") pod \"810ac7c2-fe05-4ca0-917e-da3f0427e918\" (UID: \"810ac7c2-fe05-4ca0-917e-da3f0427e918\") " Oct 01 16:05:58 crc kubenswrapper[4869]: I1001 16:05:58.453284 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8v8hb\" (UniqueName: \"kubernetes.io/projected/810ac7c2-fe05-4ca0-917e-da3f0427e918-kube-api-access-8v8hb\") pod \"810ac7c2-fe05-4ca0-917e-da3f0427e918\" (UID: \"810ac7c2-fe05-4ca0-917e-da3f0427e918\") " Oct 01 16:05:58 crc kubenswrapper[4869]: I1001 16:05:58.454203 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/810ac7c2-fe05-4ca0-917e-da3f0427e918-utilities" (OuterVolumeSpecName: "utilities") pod "810ac7c2-fe05-4ca0-917e-da3f0427e918" (UID: "810ac7c2-fe05-4ca0-917e-da3f0427e918"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 16:05:58 crc kubenswrapper[4869]: I1001 16:05:58.460003 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/810ac7c2-fe05-4ca0-917e-da3f0427e918-kube-api-access-8v8hb" (OuterVolumeSpecName: "kube-api-access-8v8hb") pod "810ac7c2-fe05-4ca0-917e-da3f0427e918" (UID: "810ac7c2-fe05-4ca0-917e-da3f0427e918"). InnerVolumeSpecName "kube-api-access-8v8hb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 16:05:58 crc kubenswrapper[4869]: I1001 16:05:58.466666 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/810ac7c2-fe05-4ca0-917e-da3f0427e918-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "810ac7c2-fe05-4ca0-917e-da3f0427e918" (UID: "810ac7c2-fe05-4ca0-917e-da3f0427e918"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 16:05:58 crc kubenswrapper[4869]: I1001 16:05:58.556067 4869 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/810ac7c2-fe05-4ca0-917e-da3f0427e918-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 16:05:58 crc kubenswrapper[4869]: I1001 16:05:58.556114 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8v8hb\" (UniqueName: \"kubernetes.io/projected/810ac7c2-fe05-4ca0-917e-da3f0427e918-kube-api-access-8v8hb\") on node \"crc\" DevicePath \"\"" Oct 01 16:05:58 crc kubenswrapper[4869]: I1001 16:05:58.556132 4869 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/810ac7c2-fe05-4ca0-917e-da3f0427e918-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 16:05:58 crc kubenswrapper[4869]: I1001 16:05:58.576665 4869 generic.go:334] "Generic (PLEG): container finished" podID="810ac7c2-fe05-4ca0-917e-da3f0427e918" containerID="092f4d144e7327c49428d99c14e65fa3c69667031843a46beaed1c84257f9030" exitCode=0 Oct 01 16:05:58 crc kubenswrapper[4869]: I1001 16:05:58.576712 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mk4rh" event={"ID":"810ac7c2-fe05-4ca0-917e-da3f0427e918","Type":"ContainerDied","Data":"092f4d144e7327c49428d99c14e65fa3c69667031843a46beaed1c84257f9030"} Oct 01 16:05:58 crc kubenswrapper[4869]: I1001 16:05:58.576742 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mk4rh" event={"ID":"810ac7c2-fe05-4ca0-917e-da3f0427e918","Type":"ContainerDied","Data":"e5393e2f2bd6c36c6c0cf1edc718a989145d700eb2c460bf24c42f91263a332d"} Oct 01 16:05:58 crc kubenswrapper[4869]: I1001 16:05:58.576761 4869 scope.go:117] "RemoveContainer" containerID="092f4d144e7327c49428d99c14e65fa3c69667031843a46beaed1c84257f9030" Oct 01 16:05:58 crc kubenswrapper[4869]: I1001 16:05:58.576802 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mk4rh" Oct 01 16:05:58 crc kubenswrapper[4869]: I1001 16:05:58.612502 4869 scope.go:117] "RemoveContainer" containerID="b0eae9c4d0ff8dbee2eb02e089663b9052888440fda00d7841d12f57d9f0adec" Oct 01 16:05:58 crc kubenswrapper[4869]: I1001 16:05:58.626231 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-mk4rh"] Oct 01 16:05:58 crc kubenswrapper[4869]: I1001 16:05:58.635805 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-mk4rh"] Oct 01 16:05:58 crc kubenswrapper[4869]: I1001 16:05:58.655285 4869 scope.go:117] "RemoveContainer" containerID="197f6c03bc94d7bd4ea7e337daf9f0e2c3ceaa3711837c9f9dd52d0d03a5e720" Oct 01 16:05:58 crc kubenswrapper[4869]: I1001 16:05:58.685573 4869 scope.go:117] "RemoveContainer" containerID="092f4d144e7327c49428d99c14e65fa3c69667031843a46beaed1c84257f9030" Oct 01 16:05:58 crc kubenswrapper[4869]: E1001 16:05:58.686383 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"092f4d144e7327c49428d99c14e65fa3c69667031843a46beaed1c84257f9030\": container with ID starting with 092f4d144e7327c49428d99c14e65fa3c69667031843a46beaed1c84257f9030 not found: ID does not exist" containerID="092f4d144e7327c49428d99c14e65fa3c69667031843a46beaed1c84257f9030" Oct 01 16:05:58 crc kubenswrapper[4869]: I1001 16:05:58.686427 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"092f4d144e7327c49428d99c14e65fa3c69667031843a46beaed1c84257f9030"} err="failed to get container status \"092f4d144e7327c49428d99c14e65fa3c69667031843a46beaed1c84257f9030\": rpc error: code = NotFound desc = could not find container \"092f4d144e7327c49428d99c14e65fa3c69667031843a46beaed1c84257f9030\": container with ID starting with 092f4d144e7327c49428d99c14e65fa3c69667031843a46beaed1c84257f9030 not found: ID does not exist" Oct 01 16:05:58 crc kubenswrapper[4869]: I1001 16:05:58.686453 4869 scope.go:117] "RemoveContainer" containerID="b0eae9c4d0ff8dbee2eb02e089663b9052888440fda00d7841d12f57d9f0adec" Oct 01 16:05:58 crc kubenswrapper[4869]: E1001 16:05:58.686970 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b0eae9c4d0ff8dbee2eb02e089663b9052888440fda00d7841d12f57d9f0adec\": container with ID starting with b0eae9c4d0ff8dbee2eb02e089663b9052888440fda00d7841d12f57d9f0adec not found: ID does not exist" containerID="b0eae9c4d0ff8dbee2eb02e089663b9052888440fda00d7841d12f57d9f0adec" Oct 01 16:05:58 crc kubenswrapper[4869]: I1001 16:05:58.687013 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b0eae9c4d0ff8dbee2eb02e089663b9052888440fda00d7841d12f57d9f0adec"} err="failed to get container status \"b0eae9c4d0ff8dbee2eb02e089663b9052888440fda00d7841d12f57d9f0adec\": rpc error: code = NotFound desc = could not find container \"b0eae9c4d0ff8dbee2eb02e089663b9052888440fda00d7841d12f57d9f0adec\": container with ID starting with b0eae9c4d0ff8dbee2eb02e089663b9052888440fda00d7841d12f57d9f0adec not found: ID does not exist" Oct 01 16:05:58 crc kubenswrapper[4869]: I1001 16:05:58.687040 4869 scope.go:117] "RemoveContainer" containerID="197f6c03bc94d7bd4ea7e337daf9f0e2c3ceaa3711837c9f9dd52d0d03a5e720" Oct 01 16:05:58 crc kubenswrapper[4869]: E1001 16:05:58.687415 4869 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"197f6c03bc94d7bd4ea7e337daf9f0e2c3ceaa3711837c9f9dd52d0d03a5e720\": container with ID starting with 197f6c03bc94d7bd4ea7e337daf9f0e2c3ceaa3711837c9f9dd52d0d03a5e720 not found: ID does not exist" containerID="197f6c03bc94d7bd4ea7e337daf9f0e2c3ceaa3711837c9f9dd52d0d03a5e720" Oct 01 16:05:58 crc kubenswrapper[4869]: I1001 16:05:58.687441 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"197f6c03bc94d7bd4ea7e337daf9f0e2c3ceaa3711837c9f9dd52d0d03a5e720"} err="failed to get container status \"197f6c03bc94d7bd4ea7e337daf9f0e2c3ceaa3711837c9f9dd52d0d03a5e720\": rpc error: code = NotFound desc = could not find container \"197f6c03bc94d7bd4ea7e337daf9f0e2c3ceaa3711837c9f9dd52d0d03a5e720\": container with ID starting with 197f6c03bc94d7bd4ea7e337daf9f0e2c3ceaa3711837c9f9dd52d0d03a5e720 not found: ID does not exist" Oct 01 16:05:59 crc kubenswrapper[4869]: I1001 16:05:59.593923 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="810ac7c2-fe05-4ca0-917e-da3f0427e918" path="/var/lib/kubelet/pods/810ac7c2-fe05-4ca0-917e-da3f0427e918/volumes" Oct 01 16:06:07 crc kubenswrapper[4869]: I1001 16:06:07.581456 4869 scope.go:117] "RemoveContainer" containerID="79c9485f04215c7c3b6f12e2efc6b087d5369f6ae2f17fb6c51f154c13b2407a" Oct 01 16:06:07 crc kubenswrapper[4869]: E1001 16:06:07.582201 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:06:16 crc kubenswrapper[4869]: I1001 16:06:16.339002 4869 scope.go:117] "RemoveContainer" containerID="9a6ff92c736b56b7e0c8e4755710c6604d708a1425dd8a03279af01354109637" Oct 01 16:06:21 crc kubenswrapper[4869]: I1001 16:06:21.603771 4869 scope.go:117] "RemoveContainer" containerID="79c9485f04215c7c3b6f12e2efc6b087d5369f6ae2f17fb6c51f154c13b2407a" Oct 01 16:06:21 crc kubenswrapper[4869]: E1001 16:06:21.604979 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:06:32 crc kubenswrapper[4869]: I1001 16:06:32.581768 4869 scope.go:117] "RemoveContainer" containerID="79c9485f04215c7c3b6f12e2efc6b087d5369f6ae2f17fb6c51f154c13b2407a" Oct 01 16:06:32 crc kubenswrapper[4869]: E1001 16:06:32.582748 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:06:44 crc kubenswrapper[4869]: I1001 16:06:44.581755 4869 scope.go:117] "RemoveContainer" 
containerID="79c9485f04215c7c3b6f12e2efc6b087d5369f6ae2f17fb6c51f154c13b2407a" Oct 01 16:06:44 crc kubenswrapper[4869]: E1001 16:06:44.583236 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:06:56 crc kubenswrapper[4869]: I1001 16:06:56.580892 4869 scope.go:117] "RemoveContainer" containerID="79c9485f04215c7c3b6f12e2efc6b087d5369f6ae2f17fb6c51f154c13b2407a" Oct 01 16:06:56 crc kubenswrapper[4869]: E1001 16:06:56.581820 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:07:10 crc kubenswrapper[4869]: I1001 16:07:10.581634 4869 scope.go:117] "RemoveContainer" containerID="79c9485f04215c7c3b6f12e2efc6b087d5369f6ae2f17fb6c51f154c13b2407a" Oct 01 16:07:10 crc kubenswrapper[4869]: E1001 16:07:10.582443 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:07:24 crc kubenswrapper[4869]: I1001 16:07:24.580990 4869 scope.go:117] "RemoveContainer" containerID="79c9485f04215c7c3b6f12e2efc6b087d5369f6ae2f17fb6c51f154c13b2407a" Oct 01 16:07:24 crc kubenswrapper[4869]: E1001 16:07:24.581657 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:07:35 crc kubenswrapper[4869]: I1001 16:07:35.581807 4869 scope.go:117] "RemoveContainer" containerID="79c9485f04215c7c3b6f12e2efc6b087d5369f6ae2f17fb6c51f154c13b2407a" Oct 01 16:07:35 crc kubenswrapper[4869]: E1001 16:07:35.582873 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:07:46 crc kubenswrapper[4869]: I1001 16:07:46.580691 4869 scope.go:117] "RemoveContainer" containerID="79c9485f04215c7c3b6f12e2efc6b087d5369f6ae2f17fb6c51f154c13b2407a" Oct 01 16:07:46 crc kubenswrapper[4869]: E1001 16:07:46.581562 4869 pod_workers.go:1301] "Error syncing pod, 
skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:08:01 crc kubenswrapper[4869]: I1001 16:08:01.589074 4869 scope.go:117] "RemoveContainer" containerID="79c9485f04215c7c3b6f12e2efc6b087d5369f6ae2f17fb6c51f154c13b2407a" Oct 01 16:08:01 crc kubenswrapper[4869]: E1001 16:08:01.589919 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:08:16 crc kubenswrapper[4869]: I1001 16:08:16.581372 4869 scope.go:117] "RemoveContainer" containerID="79c9485f04215c7c3b6f12e2efc6b087d5369f6ae2f17fb6c51f154c13b2407a" Oct 01 16:08:16 crc kubenswrapper[4869]: E1001 16:08:16.582141 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:08:21 crc kubenswrapper[4869]: I1001 16:08:21.738482 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-86tmn"] Oct 01 16:08:21 crc kubenswrapper[4869]: E1001 16:08:21.739629 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="810ac7c2-fe05-4ca0-917e-da3f0427e918" containerName="registry-server" Oct 01 16:08:21 crc kubenswrapper[4869]: I1001 16:08:21.739645 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="810ac7c2-fe05-4ca0-917e-da3f0427e918" containerName="registry-server" Oct 01 16:08:21 crc kubenswrapper[4869]: E1001 16:08:21.739660 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="810ac7c2-fe05-4ca0-917e-da3f0427e918" containerName="extract-content" Oct 01 16:08:21 crc kubenswrapper[4869]: I1001 16:08:21.739667 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="810ac7c2-fe05-4ca0-917e-da3f0427e918" containerName="extract-content" Oct 01 16:08:21 crc kubenswrapper[4869]: E1001 16:08:21.739691 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bc1a79e5-a111-48db-9d8c-4b4237b21107" containerName="extract-utilities" Oct 01 16:08:21 crc kubenswrapper[4869]: I1001 16:08:21.739698 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="bc1a79e5-a111-48db-9d8c-4b4237b21107" containerName="extract-utilities" Oct 01 16:08:21 crc kubenswrapper[4869]: E1001 16:08:21.739712 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bc1a79e5-a111-48db-9d8c-4b4237b21107" containerName="registry-server" Oct 01 16:08:21 crc kubenswrapper[4869]: I1001 16:08:21.739719 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="bc1a79e5-a111-48db-9d8c-4b4237b21107" containerName="registry-server" Oct 01 16:08:21 crc kubenswrapper[4869]: E1001 16:08:21.739739 
4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="810ac7c2-fe05-4ca0-917e-da3f0427e918" containerName="extract-utilities" Oct 01 16:08:21 crc kubenswrapper[4869]: I1001 16:08:21.739748 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="810ac7c2-fe05-4ca0-917e-da3f0427e918" containerName="extract-utilities" Oct 01 16:08:21 crc kubenswrapper[4869]: E1001 16:08:21.739762 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bc1a79e5-a111-48db-9d8c-4b4237b21107" containerName="extract-content" Oct 01 16:08:21 crc kubenswrapper[4869]: I1001 16:08:21.739771 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="bc1a79e5-a111-48db-9d8c-4b4237b21107" containerName="extract-content" Oct 01 16:08:21 crc kubenswrapper[4869]: I1001 16:08:21.739979 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="810ac7c2-fe05-4ca0-917e-da3f0427e918" containerName="registry-server" Oct 01 16:08:21 crc kubenswrapper[4869]: I1001 16:08:21.739997 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="bc1a79e5-a111-48db-9d8c-4b4237b21107" containerName="registry-server" Oct 01 16:08:21 crc kubenswrapper[4869]: I1001 16:08:21.742771 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-86tmn" Oct 01 16:08:21 crc kubenswrapper[4869]: I1001 16:08:21.766716 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-86tmn"] Oct 01 16:08:21 crc kubenswrapper[4869]: I1001 16:08:21.864135 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6ff77e4e-8510-40f2-a6ce-b4b276e8fd81-utilities\") pod \"community-operators-86tmn\" (UID: \"6ff77e4e-8510-40f2-a6ce-b4b276e8fd81\") " pod="openshift-marketplace/community-operators-86tmn" Oct 01 16:08:21 crc kubenswrapper[4869]: I1001 16:08:21.864533 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b6gkz\" (UniqueName: \"kubernetes.io/projected/6ff77e4e-8510-40f2-a6ce-b4b276e8fd81-kube-api-access-b6gkz\") pod \"community-operators-86tmn\" (UID: \"6ff77e4e-8510-40f2-a6ce-b4b276e8fd81\") " pod="openshift-marketplace/community-operators-86tmn" Oct 01 16:08:21 crc kubenswrapper[4869]: I1001 16:08:21.864789 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6ff77e4e-8510-40f2-a6ce-b4b276e8fd81-catalog-content\") pod \"community-operators-86tmn\" (UID: \"6ff77e4e-8510-40f2-a6ce-b4b276e8fd81\") " pod="openshift-marketplace/community-operators-86tmn" Oct 01 16:08:21 crc kubenswrapper[4869]: I1001 16:08:21.967025 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6ff77e4e-8510-40f2-a6ce-b4b276e8fd81-utilities\") pod \"community-operators-86tmn\" (UID: \"6ff77e4e-8510-40f2-a6ce-b4b276e8fd81\") " pod="openshift-marketplace/community-operators-86tmn" Oct 01 16:08:21 crc kubenswrapper[4869]: I1001 16:08:21.967088 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b6gkz\" (UniqueName: \"kubernetes.io/projected/6ff77e4e-8510-40f2-a6ce-b4b276e8fd81-kube-api-access-b6gkz\") pod \"community-operators-86tmn\" (UID: \"6ff77e4e-8510-40f2-a6ce-b4b276e8fd81\") " pod="openshift-marketplace/community-operators-86tmn" Oct 
01 16:08:21 crc kubenswrapper[4869]: I1001 16:08:21.967244 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6ff77e4e-8510-40f2-a6ce-b4b276e8fd81-catalog-content\") pod \"community-operators-86tmn\" (UID: \"6ff77e4e-8510-40f2-a6ce-b4b276e8fd81\") " pod="openshift-marketplace/community-operators-86tmn" Oct 01 16:08:21 crc kubenswrapper[4869]: I1001 16:08:21.967727 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6ff77e4e-8510-40f2-a6ce-b4b276e8fd81-utilities\") pod \"community-operators-86tmn\" (UID: \"6ff77e4e-8510-40f2-a6ce-b4b276e8fd81\") " pod="openshift-marketplace/community-operators-86tmn" Oct 01 16:08:21 crc kubenswrapper[4869]: I1001 16:08:21.967783 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6ff77e4e-8510-40f2-a6ce-b4b276e8fd81-catalog-content\") pod \"community-operators-86tmn\" (UID: \"6ff77e4e-8510-40f2-a6ce-b4b276e8fd81\") " pod="openshift-marketplace/community-operators-86tmn" Oct 01 16:08:21 crc kubenswrapper[4869]: I1001 16:08:21.997603 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b6gkz\" (UniqueName: \"kubernetes.io/projected/6ff77e4e-8510-40f2-a6ce-b4b276e8fd81-kube-api-access-b6gkz\") pod \"community-operators-86tmn\" (UID: \"6ff77e4e-8510-40f2-a6ce-b4b276e8fd81\") " pod="openshift-marketplace/community-operators-86tmn" Oct 01 16:08:22 crc kubenswrapper[4869]: I1001 16:08:22.061862 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-86tmn" Oct 01 16:08:22 crc kubenswrapper[4869]: I1001 16:08:22.635218 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-86tmn"] Oct 01 16:08:22 crc kubenswrapper[4869]: I1001 16:08:22.889423 4869 generic.go:334] "Generic (PLEG): container finished" podID="6ff77e4e-8510-40f2-a6ce-b4b276e8fd81" containerID="f559b96d909062b49e1d3730094caf87d7db415aceb069969adca5c29d12bf51" exitCode=0 Oct 01 16:08:22 crc kubenswrapper[4869]: I1001 16:08:22.889712 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-86tmn" event={"ID":"6ff77e4e-8510-40f2-a6ce-b4b276e8fd81","Type":"ContainerDied","Data":"f559b96d909062b49e1d3730094caf87d7db415aceb069969adca5c29d12bf51"} Oct 01 16:08:22 crc kubenswrapper[4869]: I1001 16:08:22.889742 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-86tmn" event={"ID":"6ff77e4e-8510-40f2-a6ce-b4b276e8fd81","Type":"ContainerStarted","Data":"5a3e099253372a261dad4e05a9ae16959893a8889a774803c43f69c126b9a82a"} Oct 01 16:08:24 crc kubenswrapper[4869]: I1001 16:08:24.911644 4869 generic.go:334] "Generic (PLEG): container finished" podID="6ff77e4e-8510-40f2-a6ce-b4b276e8fd81" containerID="4b56609ebe4f0412d08c646cb640904b6933e7702f6f583a2ea814988d5747d2" exitCode=0 Oct 01 16:08:24 crc kubenswrapper[4869]: I1001 16:08:24.911762 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-86tmn" event={"ID":"6ff77e4e-8510-40f2-a6ce-b4b276e8fd81","Type":"ContainerDied","Data":"4b56609ebe4f0412d08c646cb640904b6933e7702f6f583a2ea814988d5747d2"} Oct 01 16:08:25 crc kubenswrapper[4869]: I1001 16:08:25.928098 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/community-operators-86tmn" event={"ID":"6ff77e4e-8510-40f2-a6ce-b4b276e8fd81","Type":"ContainerStarted","Data":"891c40f04b4584c53035a5c8828bc036c8dbd90c8221b0e3a4b32fd23e912623"} Oct 01 16:08:31 crc kubenswrapper[4869]: I1001 16:08:31.588359 4869 scope.go:117] "RemoveContainer" containerID="79c9485f04215c7c3b6f12e2efc6b087d5369f6ae2f17fb6c51f154c13b2407a" Oct 01 16:08:31 crc kubenswrapper[4869]: E1001 16:08:31.589478 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:08:32 crc kubenswrapper[4869]: I1001 16:08:32.062697 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-86tmn" Oct 01 16:08:32 crc kubenswrapper[4869]: I1001 16:08:32.063095 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-86tmn" Oct 01 16:08:32 crc kubenswrapper[4869]: I1001 16:08:32.112666 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-86tmn" Oct 01 16:08:32 crc kubenswrapper[4869]: I1001 16:08:32.131964 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-86tmn" podStartSLOduration=8.69253219 podStartE2EDuration="11.131947546s" podCreationTimestamp="2025-10-01 16:08:21 +0000 UTC" firstStartedPulling="2025-10-01 16:08:22.891533535 +0000 UTC m=+3812.038376661" lastFinishedPulling="2025-10-01 16:08:25.330948901 +0000 UTC m=+3814.477792017" observedRunningTime="2025-10-01 16:08:25.971629447 +0000 UTC m=+3815.118472583" watchObservedRunningTime="2025-10-01 16:08:32.131947546 +0000 UTC m=+3821.278790662" Oct 01 16:08:33 crc kubenswrapper[4869]: I1001 16:08:33.041059 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-86tmn" Oct 01 16:08:33 crc kubenswrapper[4869]: I1001 16:08:33.088120 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-86tmn"] Oct 01 16:08:35 crc kubenswrapper[4869]: I1001 16:08:35.010802 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-86tmn" podUID="6ff77e4e-8510-40f2-a6ce-b4b276e8fd81" containerName="registry-server" containerID="cri-o://891c40f04b4584c53035a5c8828bc036c8dbd90c8221b0e3a4b32fd23e912623" gracePeriod=2 Oct 01 16:08:35 crc kubenswrapper[4869]: I1001 16:08:35.953018 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-86tmn" Oct 01 16:08:36 crc kubenswrapper[4869]: I1001 16:08:36.028860 4869 generic.go:334] "Generic (PLEG): container finished" podID="6ff77e4e-8510-40f2-a6ce-b4b276e8fd81" containerID="891c40f04b4584c53035a5c8828bc036c8dbd90c8221b0e3a4b32fd23e912623" exitCode=0 Oct 01 16:08:36 crc kubenswrapper[4869]: I1001 16:08:36.028907 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-86tmn" event={"ID":"6ff77e4e-8510-40f2-a6ce-b4b276e8fd81","Type":"ContainerDied","Data":"891c40f04b4584c53035a5c8828bc036c8dbd90c8221b0e3a4b32fd23e912623"} Oct 01 16:08:36 crc kubenswrapper[4869]: I1001 16:08:36.028943 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-86tmn" event={"ID":"6ff77e4e-8510-40f2-a6ce-b4b276e8fd81","Type":"ContainerDied","Data":"5a3e099253372a261dad4e05a9ae16959893a8889a774803c43f69c126b9a82a"} Oct 01 16:08:36 crc kubenswrapper[4869]: I1001 16:08:36.028963 4869 scope.go:117] "RemoveContainer" containerID="891c40f04b4584c53035a5c8828bc036c8dbd90c8221b0e3a4b32fd23e912623" Oct 01 16:08:36 crc kubenswrapper[4869]: I1001 16:08:36.028973 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-86tmn" Oct 01 16:08:36 crc kubenswrapper[4869]: I1001 16:08:36.060071 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6ff77e4e-8510-40f2-a6ce-b4b276e8fd81-utilities\") pod \"6ff77e4e-8510-40f2-a6ce-b4b276e8fd81\" (UID: \"6ff77e4e-8510-40f2-a6ce-b4b276e8fd81\") " Oct 01 16:08:36 crc kubenswrapper[4869]: I1001 16:08:36.060201 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b6gkz\" (UniqueName: \"kubernetes.io/projected/6ff77e4e-8510-40f2-a6ce-b4b276e8fd81-kube-api-access-b6gkz\") pod \"6ff77e4e-8510-40f2-a6ce-b4b276e8fd81\" (UID: \"6ff77e4e-8510-40f2-a6ce-b4b276e8fd81\") " Oct 01 16:08:36 crc kubenswrapper[4869]: I1001 16:08:36.060363 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6ff77e4e-8510-40f2-a6ce-b4b276e8fd81-catalog-content\") pod \"6ff77e4e-8510-40f2-a6ce-b4b276e8fd81\" (UID: \"6ff77e4e-8510-40f2-a6ce-b4b276e8fd81\") " Oct 01 16:08:36 crc kubenswrapper[4869]: I1001 16:08:36.061469 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6ff77e4e-8510-40f2-a6ce-b4b276e8fd81-utilities" (OuterVolumeSpecName: "utilities") pod "6ff77e4e-8510-40f2-a6ce-b4b276e8fd81" (UID: "6ff77e4e-8510-40f2-a6ce-b4b276e8fd81"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 16:08:36 crc kubenswrapper[4869]: I1001 16:08:36.071758 4869 scope.go:117] "RemoveContainer" containerID="4b56609ebe4f0412d08c646cb640904b6933e7702f6f583a2ea814988d5747d2" Oct 01 16:08:36 crc kubenswrapper[4869]: I1001 16:08:36.078592 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ff77e4e-8510-40f2-a6ce-b4b276e8fd81-kube-api-access-b6gkz" (OuterVolumeSpecName: "kube-api-access-b6gkz") pod "6ff77e4e-8510-40f2-a6ce-b4b276e8fd81" (UID: "6ff77e4e-8510-40f2-a6ce-b4b276e8fd81"). InnerVolumeSpecName "kube-api-access-b6gkz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 16:08:36 crc kubenswrapper[4869]: I1001 16:08:36.125499 4869 scope.go:117] "RemoveContainer" containerID="f559b96d909062b49e1d3730094caf87d7db415aceb069969adca5c29d12bf51" Oct 01 16:08:36 crc kubenswrapper[4869]: I1001 16:08:36.132741 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6ff77e4e-8510-40f2-a6ce-b4b276e8fd81-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6ff77e4e-8510-40f2-a6ce-b4b276e8fd81" (UID: "6ff77e4e-8510-40f2-a6ce-b4b276e8fd81"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 16:08:36 crc kubenswrapper[4869]: I1001 16:08:36.162651 4869 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6ff77e4e-8510-40f2-a6ce-b4b276e8fd81-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 16:08:36 crc kubenswrapper[4869]: I1001 16:08:36.162690 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b6gkz\" (UniqueName: \"kubernetes.io/projected/6ff77e4e-8510-40f2-a6ce-b4b276e8fd81-kube-api-access-b6gkz\") on node \"crc\" DevicePath \"\"" Oct 01 16:08:36 crc kubenswrapper[4869]: I1001 16:08:36.162701 4869 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6ff77e4e-8510-40f2-a6ce-b4b276e8fd81-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 16:08:36 crc kubenswrapper[4869]: I1001 16:08:36.179597 4869 scope.go:117] "RemoveContainer" containerID="891c40f04b4584c53035a5c8828bc036c8dbd90c8221b0e3a4b32fd23e912623" Oct 01 16:08:36 crc kubenswrapper[4869]: E1001 16:08:36.180292 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"891c40f04b4584c53035a5c8828bc036c8dbd90c8221b0e3a4b32fd23e912623\": container with ID starting with 891c40f04b4584c53035a5c8828bc036c8dbd90c8221b0e3a4b32fd23e912623 not found: ID does not exist" containerID="891c40f04b4584c53035a5c8828bc036c8dbd90c8221b0e3a4b32fd23e912623" Oct 01 16:08:36 crc kubenswrapper[4869]: I1001 16:08:36.180335 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"891c40f04b4584c53035a5c8828bc036c8dbd90c8221b0e3a4b32fd23e912623"} err="failed to get container status \"891c40f04b4584c53035a5c8828bc036c8dbd90c8221b0e3a4b32fd23e912623\": rpc error: code = NotFound desc = could not find container \"891c40f04b4584c53035a5c8828bc036c8dbd90c8221b0e3a4b32fd23e912623\": container with ID starting with 891c40f04b4584c53035a5c8828bc036c8dbd90c8221b0e3a4b32fd23e912623 not found: ID does not exist" Oct 01 16:08:36 crc kubenswrapper[4869]: I1001 16:08:36.180364 4869 scope.go:117] "RemoveContainer" containerID="4b56609ebe4f0412d08c646cb640904b6933e7702f6f583a2ea814988d5747d2" Oct 01 16:08:36 crc kubenswrapper[4869]: E1001 16:08:36.180834 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4b56609ebe4f0412d08c646cb640904b6933e7702f6f583a2ea814988d5747d2\": container with ID starting with 4b56609ebe4f0412d08c646cb640904b6933e7702f6f583a2ea814988d5747d2 not found: ID does not exist" containerID="4b56609ebe4f0412d08c646cb640904b6933e7702f6f583a2ea814988d5747d2" Oct 01 16:08:36 crc kubenswrapper[4869]: I1001 16:08:36.180935 4869 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"4b56609ebe4f0412d08c646cb640904b6933e7702f6f583a2ea814988d5747d2"} err="failed to get container status \"4b56609ebe4f0412d08c646cb640904b6933e7702f6f583a2ea814988d5747d2\": rpc error: code = NotFound desc = could not find container \"4b56609ebe4f0412d08c646cb640904b6933e7702f6f583a2ea814988d5747d2\": container with ID starting with 4b56609ebe4f0412d08c646cb640904b6933e7702f6f583a2ea814988d5747d2 not found: ID does not exist" Oct 01 16:08:36 crc kubenswrapper[4869]: I1001 16:08:36.181028 4869 scope.go:117] "RemoveContainer" containerID="f559b96d909062b49e1d3730094caf87d7db415aceb069969adca5c29d12bf51" Oct 01 16:08:36 crc kubenswrapper[4869]: E1001 16:08:36.181388 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f559b96d909062b49e1d3730094caf87d7db415aceb069969adca5c29d12bf51\": container with ID starting with f559b96d909062b49e1d3730094caf87d7db415aceb069969adca5c29d12bf51 not found: ID does not exist" containerID="f559b96d909062b49e1d3730094caf87d7db415aceb069969adca5c29d12bf51" Oct 01 16:08:36 crc kubenswrapper[4869]: I1001 16:08:36.181508 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f559b96d909062b49e1d3730094caf87d7db415aceb069969adca5c29d12bf51"} err="failed to get container status \"f559b96d909062b49e1d3730094caf87d7db415aceb069969adca5c29d12bf51\": rpc error: code = NotFound desc = could not find container \"f559b96d909062b49e1d3730094caf87d7db415aceb069969adca5c29d12bf51\": container with ID starting with f559b96d909062b49e1d3730094caf87d7db415aceb069969adca5c29d12bf51 not found: ID does not exist" Oct 01 16:08:36 crc kubenswrapper[4869]: I1001 16:08:36.360209 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-86tmn"] Oct 01 16:08:36 crc kubenswrapper[4869]: I1001 16:08:36.373725 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-86tmn"] Oct 01 16:08:37 crc kubenswrapper[4869]: I1001 16:08:37.592241 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ff77e4e-8510-40f2-a6ce-b4b276e8fd81" path="/var/lib/kubelet/pods/6ff77e4e-8510-40f2-a6ce-b4b276e8fd81/volumes" Oct 01 16:08:46 crc kubenswrapper[4869]: I1001 16:08:46.580843 4869 scope.go:117] "RemoveContainer" containerID="79c9485f04215c7c3b6f12e2efc6b087d5369f6ae2f17fb6c51f154c13b2407a" Oct 01 16:08:47 crc kubenswrapper[4869]: I1001 16:08:47.131768 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" event={"ID":"a4b64f7f-0b03-4f47-965b-9fde048b735c","Type":"ContainerStarted","Data":"ec4f8580bbf4c9d8e958518924e5a68ca1724128d84f5b618afdec23bde550e3"} Oct 01 16:11:13 crc kubenswrapper[4869]: I1001 16:11:13.353765 4869 patch_prober.go:28] interesting pod/machine-config-daemon-c86m8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 16:11:13 crc kubenswrapper[4869]: I1001 16:11:13.354244 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection 
refused" Oct 01 16:11:43 crc kubenswrapper[4869]: I1001 16:11:43.354581 4869 patch_prober.go:28] interesting pod/machine-config-daemon-c86m8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 16:11:43 crc kubenswrapper[4869]: I1001 16:11:43.355197 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 16:12:13 crc kubenswrapper[4869]: I1001 16:12:13.354622 4869 patch_prober.go:28] interesting pod/machine-config-daemon-c86m8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 16:12:13 crc kubenswrapper[4869]: I1001 16:12:13.355067 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 16:12:13 crc kubenswrapper[4869]: I1001 16:12:13.355119 4869 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" Oct 01 16:12:13 crc kubenswrapper[4869]: I1001 16:12:13.355954 4869 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"ec4f8580bbf4c9d8e958518924e5a68ca1724128d84f5b618afdec23bde550e3"} pod="openshift-machine-config-operator/machine-config-daemon-c86m8" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 01 16:12:13 crc kubenswrapper[4869]: I1001 16:12:13.356008 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" containerID="cri-o://ec4f8580bbf4c9d8e958518924e5a68ca1724128d84f5b618afdec23bde550e3" gracePeriod=600 Oct 01 16:12:13 crc kubenswrapper[4869]: I1001 16:12:13.982785 4869 generic.go:334] "Generic (PLEG): container finished" podID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerID="ec4f8580bbf4c9d8e958518924e5a68ca1724128d84f5b618afdec23bde550e3" exitCode=0 Oct 01 16:12:13 crc kubenswrapper[4869]: I1001 16:12:13.982851 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" event={"ID":"a4b64f7f-0b03-4f47-965b-9fde048b735c","Type":"ContainerDied","Data":"ec4f8580bbf4c9d8e958518924e5a68ca1724128d84f5b618afdec23bde550e3"} Oct 01 16:12:13 crc kubenswrapper[4869]: I1001 16:12:13.983351 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" event={"ID":"a4b64f7f-0b03-4f47-965b-9fde048b735c","Type":"ContainerStarted","Data":"f08ce71040f6817aa52b8a625d6cc8a13d74732e306c081f0fd134fb173f09d7"} Oct 01 16:12:13 crc kubenswrapper[4869]: I1001 16:12:13.983380 4869 scope.go:117] 
"RemoveContainer" containerID="79c9485f04215c7c3b6f12e2efc6b087d5369f6ae2f17fb6c51f154c13b2407a" Oct 01 16:14:13 crc kubenswrapper[4869]: I1001 16:14:13.354254 4869 patch_prober.go:28] interesting pod/machine-config-daemon-c86m8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 16:14:13 crc kubenswrapper[4869]: I1001 16:14:13.354730 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 16:14:43 crc kubenswrapper[4869]: I1001 16:14:43.353933 4869 patch_prober.go:28] interesting pod/machine-config-daemon-c86m8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 16:14:43 crc kubenswrapper[4869]: I1001 16:14:43.354370 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 16:15:00 crc kubenswrapper[4869]: I1001 16:15:00.145651 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29322255-pnzkx"] Oct 01 16:15:00 crc kubenswrapper[4869]: E1001 16:15:00.146635 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6ff77e4e-8510-40f2-a6ce-b4b276e8fd81" containerName="extract-utilities" Oct 01 16:15:00 crc kubenswrapper[4869]: I1001 16:15:00.146656 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="6ff77e4e-8510-40f2-a6ce-b4b276e8fd81" containerName="extract-utilities" Oct 01 16:15:00 crc kubenswrapper[4869]: E1001 16:15:00.146666 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6ff77e4e-8510-40f2-a6ce-b4b276e8fd81" containerName="registry-server" Oct 01 16:15:00 crc kubenswrapper[4869]: I1001 16:15:00.146674 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="6ff77e4e-8510-40f2-a6ce-b4b276e8fd81" containerName="registry-server" Oct 01 16:15:00 crc kubenswrapper[4869]: E1001 16:15:00.146704 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6ff77e4e-8510-40f2-a6ce-b4b276e8fd81" containerName="extract-content" Oct 01 16:15:00 crc kubenswrapper[4869]: I1001 16:15:00.146712 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="6ff77e4e-8510-40f2-a6ce-b4b276e8fd81" containerName="extract-content" Oct 01 16:15:00 crc kubenswrapper[4869]: I1001 16:15:00.146901 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="6ff77e4e-8510-40f2-a6ce-b4b276e8fd81" containerName="registry-server" Oct 01 16:15:00 crc kubenswrapper[4869]: I1001 16:15:00.147561 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29322255-pnzkx" Oct 01 16:15:00 crc kubenswrapper[4869]: I1001 16:15:00.155751 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 01 16:15:00 crc kubenswrapper[4869]: I1001 16:15:00.155776 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 01 16:15:00 crc kubenswrapper[4869]: I1001 16:15:00.169090 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29322255-pnzkx"] Oct 01 16:15:00 crc kubenswrapper[4869]: I1001 16:15:00.194867 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4c7e9681-1077-4754-b7bb-a8ebb308ed4e-secret-volume\") pod \"collect-profiles-29322255-pnzkx\" (UID: \"4c7e9681-1077-4754-b7bb-a8ebb308ed4e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322255-pnzkx" Oct 01 16:15:00 crc kubenswrapper[4869]: I1001 16:15:00.195010 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4c7e9681-1077-4754-b7bb-a8ebb308ed4e-config-volume\") pod \"collect-profiles-29322255-pnzkx\" (UID: \"4c7e9681-1077-4754-b7bb-a8ebb308ed4e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322255-pnzkx" Oct 01 16:15:00 crc kubenswrapper[4869]: I1001 16:15:00.195061 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jn7kr\" (UniqueName: \"kubernetes.io/projected/4c7e9681-1077-4754-b7bb-a8ebb308ed4e-kube-api-access-jn7kr\") pod \"collect-profiles-29322255-pnzkx\" (UID: \"4c7e9681-1077-4754-b7bb-a8ebb308ed4e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322255-pnzkx" Oct 01 16:15:00 crc kubenswrapper[4869]: I1001 16:15:00.296583 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jn7kr\" (UniqueName: \"kubernetes.io/projected/4c7e9681-1077-4754-b7bb-a8ebb308ed4e-kube-api-access-jn7kr\") pod \"collect-profiles-29322255-pnzkx\" (UID: \"4c7e9681-1077-4754-b7bb-a8ebb308ed4e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322255-pnzkx" Oct 01 16:15:00 crc kubenswrapper[4869]: I1001 16:15:00.297147 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4c7e9681-1077-4754-b7bb-a8ebb308ed4e-secret-volume\") pod \"collect-profiles-29322255-pnzkx\" (UID: \"4c7e9681-1077-4754-b7bb-a8ebb308ed4e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322255-pnzkx" Oct 01 16:15:00 crc kubenswrapper[4869]: I1001 16:15:00.297373 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4c7e9681-1077-4754-b7bb-a8ebb308ed4e-config-volume\") pod \"collect-profiles-29322255-pnzkx\" (UID: \"4c7e9681-1077-4754-b7bb-a8ebb308ed4e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322255-pnzkx" Oct 01 16:15:00 crc kubenswrapper[4869]: I1001 16:15:00.298417 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4c7e9681-1077-4754-b7bb-a8ebb308ed4e-config-volume\") pod 
\"collect-profiles-29322255-pnzkx\" (UID: \"4c7e9681-1077-4754-b7bb-a8ebb308ed4e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322255-pnzkx" Oct 01 16:15:00 crc kubenswrapper[4869]: I1001 16:15:00.303238 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4c7e9681-1077-4754-b7bb-a8ebb308ed4e-secret-volume\") pod \"collect-profiles-29322255-pnzkx\" (UID: \"4c7e9681-1077-4754-b7bb-a8ebb308ed4e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322255-pnzkx" Oct 01 16:15:00 crc kubenswrapper[4869]: I1001 16:15:00.311122 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jn7kr\" (UniqueName: \"kubernetes.io/projected/4c7e9681-1077-4754-b7bb-a8ebb308ed4e-kube-api-access-jn7kr\") pod \"collect-profiles-29322255-pnzkx\" (UID: \"4c7e9681-1077-4754-b7bb-a8ebb308ed4e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322255-pnzkx" Oct 01 16:15:00 crc kubenswrapper[4869]: I1001 16:15:00.466009 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29322255-pnzkx" Oct 01 16:15:00 crc kubenswrapper[4869]: I1001 16:15:00.933641 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29322255-pnzkx"] Oct 01 16:15:01 crc kubenswrapper[4869]: I1001 16:15:01.497675 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29322255-pnzkx" event={"ID":"4c7e9681-1077-4754-b7bb-a8ebb308ed4e","Type":"ContainerStarted","Data":"0f214d87a1cb3868b9ece6047f64e3f16ba06fb91650611da15d9f47f4bfd521"} Oct 01 16:15:01 crc kubenswrapper[4869]: I1001 16:15:01.498086 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29322255-pnzkx" event={"ID":"4c7e9681-1077-4754-b7bb-a8ebb308ed4e","Type":"ContainerStarted","Data":"52a9d8d1d86d50fe117929c584bcc9badb2c663b9bae78738f3f7f789c2f1542"} Oct 01 16:15:01 crc kubenswrapper[4869]: I1001 16:15:01.518698 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29322255-pnzkx" podStartSLOduration=1.518681107 podStartE2EDuration="1.518681107s" podCreationTimestamp="2025-10-01 16:15:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 16:15:01.515661741 +0000 UTC m=+4210.662504857" watchObservedRunningTime="2025-10-01 16:15:01.518681107 +0000 UTC m=+4210.665524223" Oct 01 16:15:02 crc kubenswrapper[4869]: I1001 16:15:02.507647 4869 generic.go:334] "Generic (PLEG): container finished" podID="4c7e9681-1077-4754-b7bb-a8ebb308ed4e" containerID="0f214d87a1cb3868b9ece6047f64e3f16ba06fb91650611da15d9f47f4bfd521" exitCode=0 Oct 01 16:15:02 crc kubenswrapper[4869]: I1001 16:15:02.507708 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29322255-pnzkx" event={"ID":"4c7e9681-1077-4754-b7bb-a8ebb308ed4e","Type":"ContainerDied","Data":"0f214d87a1cb3868b9ece6047f64e3f16ba06fb91650611da15d9f47f4bfd521"} Oct 01 16:15:04 crc kubenswrapper[4869]: I1001 16:15:04.169850 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29322255-pnzkx" Oct 01 16:15:04 crc kubenswrapper[4869]: I1001 16:15:04.289182 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4c7e9681-1077-4754-b7bb-a8ebb308ed4e-secret-volume\") pod \"4c7e9681-1077-4754-b7bb-a8ebb308ed4e\" (UID: \"4c7e9681-1077-4754-b7bb-a8ebb308ed4e\") " Oct 01 16:15:04 crc kubenswrapper[4869]: I1001 16:15:04.289247 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jn7kr\" (UniqueName: \"kubernetes.io/projected/4c7e9681-1077-4754-b7bb-a8ebb308ed4e-kube-api-access-jn7kr\") pod \"4c7e9681-1077-4754-b7bb-a8ebb308ed4e\" (UID: \"4c7e9681-1077-4754-b7bb-a8ebb308ed4e\") " Oct 01 16:15:04 crc kubenswrapper[4869]: I1001 16:15:04.289538 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4c7e9681-1077-4754-b7bb-a8ebb308ed4e-config-volume\") pod \"4c7e9681-1077-4754-b7bb-a8ebb308ed4e\" (UID: \"4c7e9681-1077-4754-b7bb-a8ebb308ed4e\") " Oct 01 16:15:04 crc kubenswrapper[4869]: I1001 16:15:04.290615 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4c7e9681-1077-4754-b7bb-a8ebb308ed4e-config-volume" (OuterVolumeSpecName: "config-volume") pod "4c7e9681-1077-4754-b7bb-a8ebb308ed4e" (UID: "4c7e9681-1077-4754-b7bb-a8ebb308ed4e"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 16:15:04 crc kubenswrapper[4869]: I1001 16:15:04.296210 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4c7e9681-1077-4754-b7bb-a8ebb308ed4e-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "4c7e9681-1077-4754-b7bb-a8ebb308ed4e" (UID: "4c7e9681-1077-4754-b7bb-a8ebb308ed4e"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 16:15:04 crc kubenswrapper[4869]: I1001 16:15:04.297571 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4c7e9681-1077-4754-b7bb-a8ebb308ed4e-kube-api-access-jn7kr" (OuterVolumeSpecName: "kube-api-access-jn7kr") pod "4c7e9681-1077-4754-b7bb-a8ebb308ed4e" (UID: "4c7e9681-1077-4754-b7bb-a8ebb308ed4e"). InnerVolumeSpecName "kube-api-access-jn7kr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 16:15:04 crc kubenswrapper[4869]: I1001 16:15:04.392039 4869 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4c7e9681-1077-4754-b7bb-a8ebb308ed4e-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 01 16:15:04 crc kubenswrapper[4869]: I1001 16:15:04.392073 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jn7kr\" (UniqueName: \"kubernetes.io/projected/4c7e9681-1077-4754-b7bb-a8ebb308ed4e-kube-api-access-jn7kr\") on node \"crc\" DevicePath \"\"" Oct 01 16:15:04 crc kubenswrapper[4869]: I1001 16:15:04.392082 4869 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4c7e9681-1077-4754-b7bb-a8ebb308ed4e-config-volume\") on node \"crc\" DevicePath \"\"" Oct 01 16:15:04 crc kubenswrapper[4869]: I1001 16:15:04.524711 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29322255-pnzkx" event={"ID":"4c7e9681-1077-4754-b7bb-a8ebb308ed4e","Type":"ContainerDied","Data":"52a9d8d1d86d50fe117929c584bcc9badb2c663b9bae78738f3f7f789c2f1542"} Oct 01 16:15:04 crc kubenswrapper[4869]: I1001 16:15:04.524753 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="52a9d8d1d86d50fe117929c584bcc9badb2c663b9bae78738f3f7f789c2f1542" Oct 01 16:15:04 crc kubenswrapper[4869]: I1001 16:15:04.524828 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29322255-pnzkx" Oct 01 16:15:04 crc kubenswrapper[4869]: I1001 16:15:04.581774 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29322210-r2pxf"] Oct 01 16:15:04 crc kubenswrapper[4869]: I1001 16:15:04.589981 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29322210-r2pxf"] Oct 01 16:15:05 crc kubenswrapper[4869]: I1001 16:15:05.592441 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b3c7b9cf-c4b5-45bd-a4f4-6178797e2d04" path="/var/lib/kubelet/pods/b3c7b9cf-c4b5-45bd-a4f4-6178797e2d04/volumes" Oct 01 16:15:13 crc kubenswrapper[4869]: I1001 16:15:13.354211 4869 patch_prober.go:28] interesting pod/machine-config-daemon-c86m8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 16:15:13 crc kubenswrapper[4869]: I1001 16:15:13.354798 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 16:15:13 crc kubenswrapper[4869]: I1001 16:15:13.354854 4869 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" Oct 01 16:15:13 crc kubenswrapper[4869]: I1001 16:15:13.355782 4869 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"f08ce71040f6817aa52b8a625d6cc8a13d74732e306c081f0fd134fb173f09d7"} 
pod="openshift-machine-config-operator/machine-config-daemon-c86m8" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 01 16:15:13 crc kubenswrapper[4869]: I1001 16:15:13.355851 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" containerID="cri-o://f08ce71040f6817aa52b8a625d6cc8a13d74732e306c081f0fd134fb173f09d7" gracePeriod=600 Oct 01 16:15:13 crc kubenswrapper[4869]: E1001 16:15:13.493582 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:15:13 crc kubenswrapper[4869]: I1001 16:15:13.620727 4869 generic.go:334] "Generic (PLEG): container finished" podID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerID="f08ce71040f6817aa52b8a625d6cc8a13d74732e306c081f0fd134fb173f09d7" exitCode=0 Oct 01 16:15:13 crc kubenswrapper[4869]: I1001 16:15:13.620806 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" event={"ID":"a4b64f7f-0b03-4f47-965b-9fde048b735c","Type":"ContainerDied","Data":"f08ce71040f6817aa52b8a625d6cc8a13d74732e306c081f0fd134fb173f09d7"} Oct 01 16:15:13 crc kubenswrapper[4869]: I1001 16:15:13.621129 4869 scope.go:117] "RemoveContainer" containerID="ec4f8580bbf4c9d8e958518924e5a68ca1724128d84f5b618afdec23bde550e3" Oct 01 16:15:13 crc kubenswrapper[4869]: I1001 16:15:13.621820 4869 scope.go:117] "RemoveContainer" containerID="f08ce71040f6817aa52b8a625d6cc8a13d74732e306c081f0fd134fb173f09d7" Oct 01 16:15:13 crc kubenswrapper[4869]: E1001 16:15:13.622444 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:15:16 crc kubenswrapper[4869]: I1001 16:15:16.583076 4869 scope.go:117] "RemoveContainer" containerID="4dd69d9fdaac305e0e109c5778080b188ff1dda5220701c8338b9458fae97169" Oct 01 16:15:23 crc kubenswrapper[4869]: I1001 16:15:23.642855 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-z42lr"] Oct 01 16:15:23 crc kubenswrapper[4869]: E1001 16:15:23.644227 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4c7e9681-1077-4754-b7bb-a8ebb308ed4e" containerName="collect-profiles" Oct 01 16:15:23 crc kubenswrapper[4869]: I1001 16:15:23.644247 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="4c7e9681-1077-4754-b7bb-a8ebb308ed4e" containerName="collect-profiles" Oct 01 16:15:23 crc kubenswrapper[4869]: I1001 16:15:23.644554 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="4c7e9681-1077-4754-b7bb-a8ebb308ed4e" containerName="collect-profiles" Oct 01 16:15:23 crc kubenswrapper[4869]: I1001 16:15:23.646638 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-z42lr" Oct 01 16:15:23 crc kubenswrapper[4869]: I1001 16:15:23.659788 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-z42lr"] Oct 01 16:15:23 crc kubenswrapper[4869]: I1001 16:15:23.802005 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/790b3bd4-9593-4f39-a398-fcf7e50e1d8a-utilities\") pod \"redhat-operators-z42lr\" (UID: \"790b3bd4-9593-4f39-a398-fcf7e50e1d8a\") " pod="openshift-marketplace/redhat-operators-z42lr" Oct 01 16:15:23 crc kubenswrapper[4869]: I1001 16:15:23.802425 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dmhpc\" (UniqueName: \"kubernetes.io/projected/790b3bd4-9593-4f39-a398-fcf7e50e1d8a-kube-api-access-dmhpc\") pod \"redhat-operators-z42lr\" (UID: \"790b3bd4-9593-4f39-a398-fcf7e50e1d8a\") " pod="openshift-marketplace/redhat-operators-z42lr" Oct 01 16:15:23 crc kubenswrapper[4869]: I1001 16:15:23.802570 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/790b3bd4-9593-4f39-a398-fcf7e50e1d8a-catalog-content\") pod \"redhat-operators-z42lr\" (UID: \"790b3bd4-9593-4f39-a398-fcf7e50e1d8a\") " pod="openshift-marketplace/redhat-operators-z42lr" Oct 01 16:15:23 crc kubenswrapper[4869]: I1001 16:15:23.905897 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/790b3bd4-9593-4f39-a398-fcf7e50e1d8a-utilities\") pod \"redhat-operators-z42lr\" (UID: \"790b3bd4-9593-4f39-a398-fcf7e50e1d8a\") " pod="openshift-marketplace/redhat-operators-z42lr" Oct 01 16:15:23 crc kubenswrapper[4869]: I1001 16:15:23.906021 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dmhpc\" (UniqueName: \"kubernetes.io/projected/790b3bd4-9593-4f39-a398-fcf7e50e1d8a-kube-api-access-dmhpc\") pod \"redhat-operators-z42lr\" (UID: \"790b3bd4-9593-4f39-a398-fcf7e50e1d8a\") " pod="openshift-marketplace/redhat-operators-z42lr" Oct 01 16:15:23 crc kubenswrapper[4869]: I1001 16:15:23.906154 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/790b3bd4-9593-4f39-a398-fcf7e50e1d8a-catalog-content\") pod \"redhat-operators-z42lr\" (UID: \"790b3bd4-9593-4f39-a398-fcf7e50e1d8a\") " pod="openshift-marketplace/redhat-operators-z42lr" Oct 01 16:15:23 crc kubenswrapper[4869]: I1001 16:15:23.906482 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/790b3bd4-9593-4f39-a398-fcf7e50e1d8a-utilities\") pod \"redhat-operators-z42lr\" (UID: \"790b3bd4-9593-4f39-a398-fcf7e50e1d8a\") " pod="openshift-marketplace/redhat-operators-z42lr" Oct 01 16:15:23 crc kubenswrapper[4869]: I1001 16:15:23.906690 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/790b3bd4-9593-4f39-a398-fcf7e50e1d8a-catalog-content\") pod \"redhat-operators-z42lr\" (UID: \"790b3bd4-9593-4f39-a398-fcf7e50e1d8a\") " pod="openshift-marketplace/redhat-operators-z42lr" Oct 01 16:15:23 crc kubenswrapper[4869]: I1001 16:15:23.928989 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-dmhpc\" (UniqueName: \"kubernetes.io/projected/790b3bd4-9593-4f39-a398-fcf7e50e1d8a-kube-api-access-dmhpc\") pod \"redhat-operators-z42lr\" (UID: \"790b3bd4-9593-4f39-a398-fcf7e50e1d8a\") " pod="openshift-marketplace/redhat-operators-z42lr" Oct 01 16:15:23 crc kubenswrapper[4869]: I1001 16:15:23.982564 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-z42lr" Oct 01 16:15:24 crc kubenswrapper[4869]: I1001 16:15:24.581921 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-z42lr"] Oct 01 16:15:24 crc kubenswrapper[4869]: I1001 16:15:24.737252 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-z42lr" event={"ID":"790b3bd4-9593-4f39-a398-fcf7e50e1d8a","Type":"ContainerStarted","Data":"b828a906ef9488ed6539d1b4b101a50a679ea1c89a6d3f65742413972777d1f8"} Oct 01 16:15:25 crc kubenswrapper[4869]: I1001 16:15:25.750395 4869 generic.go:334] "Generic (PLEG): container finished" podID="790b3bd4-9593-4f39-a398-fcf7e50e1d8a" containerID="0f3e08c26757200974712b0d91f0dbbffc093b2df8068e39a7a378726c24afcb" exitCode=0 Oct 01 16:15:25 crc kubenswrapper[4869]: I1001 16:15:25.750528 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-z42lr" event={"ID":"790b3bd4-9593-4f39-a398-fcf7e50e1d8a","Type":"ContainerDied","Data":"0f3e08c26757200974712b0d91f0dbbffc093b2df8068e39a7a378726c24afcb"} Oct 01 16:15:25 crc kubenswrapper[4869]: I1001 16:15:25.753484 4869 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 01 16:15:27 crc kubenswrapper[4869]: I1001 16:15:27.580960 4869 scope.go:117] "RemoveContainer" containerID="f08ce71040f6817aa52b8a625d6cc8a13d74732e306c081f0fd134fb173f09d7" Oct 01 16:15:27 crc kubenswrapper[4869]: E1001 16:15:27.581830 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:15:27 crc kubenswrapper[4869]: I1001 16:15:27.773288 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-z42lr" event={"ID":"790b3bd4-9593-4f39-a398-fcf7e50e1d8a","Type":"ContainerStarted","Data":"b6834d0dff51b17e68647f7f7f6e3c25084612c6436bfbfccaa6e38d57b6d1d6"} Oct 01 16:15:28 crc kubenswrapper[4869]: I1001 16:15:28.788516 4869 generic.go:334] "Generic (PLEG): container finished" podID="790b3bd4-9593-4f39-a398-fcf7e50e1d8a" containerID="b6834d0dff51b17e68647f7f7f6e3c25084612c6436bfbfccaa6e38d57b6d1d6" exitCode=0 Oct 01 16:15:28 crc kubenswrapper[4869]: I1001 16:15:28.788996 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-z42lr" event={"ID":"790b3bd4-9593-4f39-a398-fcf7e50e1d8a","Type":"ContainerDied","Data":"b6834d0dff51b17e68647f7f7f6e3c25084612c6436bfbfccaa6e38d57b6d1d6"} Oct 01 16:15:30 crc kubenswrapper[4869]: I1001 16:15:30.806246 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-z42lr" 
event={"ID":"790b3bd4-9593-4f39-a398-fcf7e50e1d8a","Type":"ContainerStarted","Data":"31a5c090db612d51bea3a218c07f0e2f8520d405e9c70fec985d96760cd1e07a"} Oct 01 16:15:30 crc kubenswrapper[4869]: I1001 16:15:30.834726 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-z42lr" podStartSLOduration=3.752022391 podStartE2EDuration="7.834708437s" podCreationTimestamp="2025-10-01 16:15:23 +0000 UTC" firstStartedPulling="2025-10-01 16:15:25.753163343 +0000 UTC m=+4234.900006459" lastFinishedPulling="2025-10-01 16:15:29.835849389 +0000 UTC m=+4238.982692505" observedRunningTime="2025-10-01 16:15:30.825515515 +0000 UTC m=+4239.972358631" watchObservedRunningTime="2025-10-01 16:15:30.834708437 +0000 UTC m=+4239.981551553" Oct 01 16:15:33 crc kubenswrapper[4869]: I1001 16:15:33.984596 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-z42lr" Oct 01 16:15:33 crc kubenswrapper[4869]: I1001 16:15:33.985213 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-z42lr" Oct 01 16:15:35 crc kubenswrapper[4869]: I1001 16:15:35.087310 4869 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-z42lr" podUID="790b3bd4-9593-4f39-a398-fcf7e50e1d8a" containerName="registry-server" probeResult="failure" output=< Oct 01 16:15:35 crc kubenswrapper[4869]: timeout: failed to connect service ":50051" within 1s Oct 01 16:15:35 crc kubenswrapper[4869]: > Oct 01 16:15:40 crc kubenswrapper[4869]: I1001 16:15:40.581532 4869 scope.go:117] "RemoveContainer" containerID="f08ce71040f6817aa52b8a625d6cc8a13d74732e306c081f0fd134fb173f09d7" Oct 01 16:15:40 crc kubenswrapper[4869]: E1001 16:15:40.582536 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:15:44 crc kubenswrapper[4869]: I1001 16:15:44.034342 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-z42lr" Oct 01 16:15:44 crc kubenswrapper[4869]: I1001 16:15:44.097979 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-z42lr" Oct 01 16:15:44 crc kubenswrapper[4869]: I1001 16:15:44.277207 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-z42lr"] Oct 01 16:15:45 crc kubenswrapper[4869]: I1001 16:15:45.920880 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-z42lr" podUID="790b3bd4-9593-4f39-a398-fcf7e50e1d8a" containerName="registry-server" containerID="cri-o://31a5c090db612d51bea3a218c07f0e2f8520d405e9c70fec985d96760cd1e07a" gracePeriod=2 Oct 01 16:15:46 crc kubenswrapper[4869]: I1001 16:15:46.828038 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-z42lr" Oct 01 16:15:46 crc kubenswrapper[4869]: I1001 16:15:46.902927 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/790b3bd4-9593-4f39-a398-fcf7e50e1d8a-utilities\") pod \"790b3bd4-9593-4f39-a398-fcf7e50e1d8a\" (UID: \"790b3bd4-9593-4f39-a398-fcf7e50e1d8a\") " Oct 01 16:15:46 crc kubenswrapper[4869]: I1001 16:15:46.903000 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/790b3bd4-9593-4f39-a398-fcf7e50e1d8a-catalog-content\") pod \"790b3bd4-9593-4f39-a398-fcf7e50e1d8a\" (UID: \"790b3bd4-9593-4f39-a398-fcf7e50e1d8a\") " Oct 01 16:15:46 crc kubenswrapper[4869]: I1001 16:15:46.903035 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dmhpc\" (UniqueName: \"kubernetes.io/projected/790b3bd4-9593-4f39-a398-fcf7e50e1d8a-kube-api-access-dmhpc\") pod \"790b3bd4-9593-4f39-a398-fcf7e50e1d8a\" (UID: \"790b3bd4-9593-4f39-a398-fcf7e50e1d8a\") " Oct 01 16:15:46 crc kubenswrapper[4869]: I1001 16:15:46.903996 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/790b3bd4-9593-4f39-a398-fcf7e50e1d8a-utilities" (OuterVolumeSpecName: "utilities") pod "790b3bd4-9593-4f39-a398-fcf7e50e1d8a" (UID: "790b3bd4-9593-4f39-a398-fcf7e50e1d8a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 16:15:46 crc kubenswrapper[4869]: I1001 16:15:46.911492 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/790b3bd4-9593-4f39-a398-fcf7e50e1d8a-kube-api-access-dmhpc" (OuterVolumeSpecName: "kube-api-access-dmhpc") pod "790b3bd4-9593-4f39-a398-fcf7e50e1d8a" (UID: "790b3bd4-9593-4f39-a398-fcf7e50e1d8a"). InnerVolumeSpecName "kube-api-access-dmhpc". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 16:15:46 crc kubenswrapper[4869]: I1001 16:15:46.931499 4869 generic.go:334] "Generic (PLEG): container finished" podID="790b3bd4-9593-4f39-a398-fcf7e50e1d8a" containerID="31a5c090db612d51bea3a218c07f0e2f8520d405e9c70fec985d96760cd1e07a" exitCode=0 Oct 01 16:15:46 crc kubenswrapper[4869]: I1001 16:15:46.932386 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-z42lr" event={"ID":"790b3bd4-9593-4f39-a398-fcf7e50e1d8a","Type":"ContainerDied","Data":"31a5c090db612d51bea3a218c07f0e2f8520d405e9c70fec985d96760cd1e07a"} Oct 01 16:15:46 crc kubenswrapper[4869]: I1001 16:15:46.932454 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-z42lr" event={"ID":"790b3bd4-9593-4f39-a398-fcf7e50e1d8a","Type":"ContainerDied","Data":"b828a906ef9488ed6539d1b4b101a50a679ea1c89a6d3f65742413972777d1f8"} Oct 01 16:15:46 crc kubenswrapper[4869]: I1001 16:15:46.932476 4869 scope.go:117] "RemoveContainer" containerID="31a5c090db612d51bea3a218c07f0e2f8520d405e9c70fec985d96760cd1e07a" Oct 01 16:15:46 crc kubenswrapper[4869]: I1001 16:15:46.932540 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-z42lr" Oct 01 16:15:46 crc kubenswrapper[4869]: I1001 16:15:46.978905 4869 scope.go:117] "RemoveContainer" containerID="b6834d0dff51b17e68647f7f7f6e3c25084612c6436bfbfccaa6e38d57b6d1d6" Oct 01 16:15:46 crc kubenswrapper[4869]: I1001 16:15:46.994346 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/790b3bd4-9593-4f39-a398-fcf7e50e1d8a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "790b3bd4-9593-4f39-a398-fcf7e50e1d8a" (UID: "790b3bd4-9593-4f39-a398-fcf7e50e1d8a"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 16:15:47 crc kubenswrapper[4869]: I1001 16:15:47.005716 4869 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/790b3bd4-9593-4f39-a398-fcf7e50e1d8a-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 16:15:47 crc kubenswrapper[4869]: I1001 16:15:47.005755 4869 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/790b3bd4-9593-4f39-a398-fcf7e50e1d8a-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 16:15:47 crc kubenswrapper[4869]: I1001 16:15:47.005766 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dmhpc\" (UniqueName: \"kubernetes.io/projected/790b3bd4-9593-4f39-a398-fcf7e50e1d8a-kube-api-access-dmhpc\") on node \"crc\" DevicePath \"\"" Oct 01 16:15:47 crc kubenswrapper[4869]: I1001 16:15:47.267824 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-z42lr"] Oct 01 16:15:47 crc kubenswrapper[4869]: I1001 16:15:47.279092 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-z42lr"] Oct 01 16:15:47 crc kubenswrapper[4869]: I1001 16:15:47.564227 4869 scope.go:117] "RemoveContainer" containerID="0f3e08c26757200974712b0d91f0dbbffc093b2df8068e39a7a378726c24afcb" Oct 01 16:15:47 crc kubenswrapper[4869]: I1001 16:15:47.592531 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="790b3bd4-9593-4f39-a398-fcf7e50e1d8a" path="/var/lib/kubelet/pods/790b3bd4-9593-4f39-a398-fcf7e50e1d8a/volumes" Oct 01 16:15:47 crc kubenswrapper[4869]: I1001 16:15:47.735409 4869 scope.go:117] "RemoveContainer" containerID="31a5c090db612d51bea3a218c07f0e2f8520d405e9c70fec985d96760cd1e07a" Oct 01 16:15:47 crc kubenswrapper[4869]: E1001 16:15:47.735885 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"31a5c090db612d51bea3a218c07f0e2f8520d405e9c70fec985d96760cd1e07a\": container with ID starting with 31a5c090db612d51bea3a218c07f0e2f8520d405e9c70fec985d96760cd1e07a not found: ID does not exist" containerID="31a5c090db612d51bea3a218c07f0e2f8520d405e9c70fec985d96760cd1e07a" Oct 01 16:15:47 crc kubenswrapper[4869]: I1001 16:15:47.735925 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"31a5c090db612d51bea3a218c07f0e2f8520d405e9c70fec985d96760cd1e07a"} err="failed to get container status \"31a5c090db612d51bea3a218c07f0e2f8520d405e9c70fec985d96760cd1e07a\": rpc error: code = NotFound desc = could not find container \"31a5c090db612d51bea3a218c07f0e2f8520d405e9c70fec985d96760cd1e07a\": container with ID starting with 31a5c090db612d51bea3a218c07f0e2f8520d405e9c70fec985d96760cd1e07a not found: ID does not exist" Oct 01 16:15:47 crc 
kubenswrapper[4869]: I1001 16:15:47.735948 4869 scope.go:117] "RemoveContainer" containerID="b6834d0dff51b17e68647f7f7f6e3c25084612c6436bfbfccaa6e38d57b6d1d6" Oct 01 16:15:47 crc kubenswrapper[4869]: E1001 16:15:47.736340 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b6834d0dff51b17e68647f7f7f6e3c25084612c6436bfbfccaa6e38d57b6d1d6\": container with ID starting with b6834d0dff51b17e68647f7f7f6e3c25084612c6436bfbfccaa6e38d57b6d1d6 not found: ID does not exist" containerID="b6834d0dff51b17e68647f7f7f6e3c25084612c6436bfbfccaa6e38d57b6d1d6" Oct 01 16:15:47 crc kubenswrapper[4869]: I1001 16:15:47.736377 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b6834d0dff51b17e68647f7f7f6e3c25084612c6436bfbfccaa6e38d57b6d1d6"} err="failed to get container status \"b6834d0dff51b17e68647f7f7f6e3c25084612c6436bfbfccaa6e38d57b6d1d6\": rpc error: code = NotFound desc = could not find container \"b6834d0dff51b17e68647f7f7f6e3c25084612c6436bfbfccaa6e38d57b6d1d6\": container with ID starting with b6834d0dff51b17e68647f7f7f6e3c25084612c6436bfbfccaa6e38d57b6d1d6 not found: ID does not exist" Oct 01 16:15:47 crc kubenswrapper[4869]: I1001 16:15:47.736405 4869 scope.go:117] "RemoveContainer" containerID="0f3e08c26757200974712b0d91f0dbbffc093b2df8068e39a7a378726c24afcb" Oct 01 16:15:47 crc kubenswrapper[4869]: E1001 16:15:47.736874 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0f3e08c26757200974712b0d91f0dbbffc093b2df8068e39a7a378726c24afcb\": container with ID starting with 0f3e08c26757200974712b0d91f0dbbffc093b2df8068e39a7a378726c24afcb not found: ID does not exist" containerID="0f3e08c26757200974712b0d91f0dbbffc093b2df8068e39a7a378726c24afcb" Oct 01 16:15:47 crc kubenswrapper[4869]: I1001 16:15:47.736900 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0f3e08c26757200974712b0d91f0dbbffc093b2df8068e39a7a378726c24afcb"} err="failed to get container status \"0f3e08c26757200974712b0d91f0dbbffc093b2df8068e39a7a378726c24afcb\": rpc error: code = NotFound desc = could not find container \"0f3e08c26757200974712b0d91f0dbbffc093b2df8068e39a7a378726c24afcb\": container with ID starting with 0f3e08c26757200974712b0d91f0dbbffc093b2df8068e39a7a378726c24afcb not found: ID does not exist" Oct 01 16:15:52 crc kubenswrapper[4869]: I1001 16:15:52.581696 4869 scope.go:117] "RemoveContainer" containerID="f08ce71040f6817aa52b8a625d6cc8a13d74732e306c081f0fd134fb173f09d7" Oct 01 16:15:52 crc kubenswrapper[4869]: E1001 16:15:52.582440 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:16:05 crc kubenswrapper[4869]: I1001 16:16:05.581731 4869 scope.go:117] "RemoveContainer" containerID="f08ce71040f6817aa52b8a625d6cc8a13d74732e306c081f0fd134fb173f09d7" Oct 01 16:16:05 crc kubenswrapper[4869]: E1001 16:16:05.582528 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:16:16 crc kubenswrapper[4869]: I1001 16:16:16.581492 4869 scope.go:117] "RemoveContainer" containerID="f08ce71040f6817aa52b8a625d6cc8a13d74732e306c081f0fd134fb173f09d7" Oct 01 16:16:16 crc kubenswrapper[4869]: E1001 16:16:16.582383 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:16:27 crc kubenswrapper[4869]: I1001 16:16:27.581098 4869 scope.go:117] "RemoveContainer" containerID="f08ce71040f6817aa52b8a625d6cc8a13d74732e306c081f0fd134fb173f09d7" Oct 01 16:16:27 crc kubenswrapper[4869]: E1001 16:16:27.581920 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:16:37 crc kubenswrapper[4869]: I1001 16:16:37.368314 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-nvb7t"] Oct 01 16:16:37 crc kubenswrapper[4869]: E1001 16:16:37.369325 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="790b3bd4-9593-4f39-a398-fcf7e50e1d8a" containerName="extract-content" Oct 01 16:16:37 crc kubenswrapper[4869]: I1001 16:16:37.369350 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="790b3bd4-9593-4f39-a398-fcf7e50e1d8a" containerName="extract-content" Oct 01 16:16:37 crc kubenswrapper[4869]: E1001 16:16:37.369365 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="790b3bd4-9593-4f39-a398-fcf7e50e1d8a" containerName="extract-utilities" Oct 01 16:16:37 crc kubenswrapper[4869]: I1001 16:16:37.369373 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="790b3bd4-9593-4f39-a398-fcf7e50e1d8a" containerName="extract-utilities" Oct 01 16:16:37 crc kubenswrapper[4869]: E1001 16:16:37.369396 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="790b3bd4-9593-4f39-a398-fcf7e50e1d8a" containerName="registry-server" Oct 01 16:16:37 crc kubenswrapper[4869]: I1001 16:16:37.369406 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="790b3bd4-9593-4f39-a398-fcf7e50e1d8a" containerName="registry-server" Oct 01 16:16:37 crc kubenswrapper[4869]: I1001 16:16:37.369664 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="790b3bd4-9593-4f39-a398-fcf7e50e1d8a" containerName="registry-server" Oct 01 16:16:37 crc kubenswrapper[4869]: I1001 16:16:37.371328 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-nvb7t" Oct 01 16:16:37 crc kubenswrapper[4869]: I1001 16:16:37.426699 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-nvb7t"] Oct 01 16:16:37 crc kubenswrapper[4869]: I1001 16:16:37.471685 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/12b56197-83b7-4f2e-a7f3-d3b304b7c2fa-utilities\") pod \"certified-operators-nvb7t\" (UID: \"12b56197-83b7-4f2e-a7f3-d3b304b7c2fa\") " pod="openshift-marketplace/certified-operators-nvb7t" Oct 01 16:16:37 crc kubenswrapper[4869]: I1001 16:16:37.471951 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x2ndm\" (UniqueName: \"kubernetes.io/projected/12b56197-83b7-4f2e-a7f3-d3b304b7c2fa-kube-api-access-x2ndm\") pod \"certified-operators-nvb7t\" (UID: \"12b56197-83b7-4f2e-a7f3-d3b304b7c2fa\") " pod="openshift-marketplace/certified-operators-nvb7t" Oct 01 16:16:37 crc kubenswrapper[4869]: I1001 16:16:37.472000 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/12b56197-83b7-4f2e-a7f3-d3b304b7c2fa-catalog-content\") pod \"certified-operators-nvb7t\" (UID: \"12b56197-83b7-4f2e-a7f3-d3b304b7c2fa\") " pod="openshift-marketplace/certified-operators-nvb7t" Oct 01 16:16:37 crc kubenswrapper[4869]: I1001 16:16:37.573767 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/12b56197-83b7-4f2e-a7f3-d3b304b7c2fa-utilities\") pod \"certified-operators-nvb7t\" (UID: \"12b56197-83b7-4f2e-a7f3-d3b304b7c2fa\") " pod="openshift-marketplace/certified-operators-nvb7t" Oct 01 16:16:37 crc kubenswrapper[4869]: I1001 16:16:37.573927 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x2ndm\" (UniqueName: \"kubernetes.io/projected/12b56197-83b7-4f2e-a7f3-d3b304b7c2fa-kube-api-access-x2ndm\") pod \"certified-operators-nvb7t\" (UID: \"12b56197-83b7-4f2e-a7f3-d3b304b7c2fa\") " pod="openshift-marketplace/certified-operators-nvb7t" Oct 01 16:16:37 crc kubenswrapper[4869]: I1001 16:16:37.573956 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/12b56197-83b7-4f2e-a7f3-d3b304b7c2fa-catalog-content\") pod \"certified-operators-nvb7t\" (UID: \"12b56197-83b7-4f2e-a7f3-d3b304b7c2fa\") " pod="openshift-marketplace/certified-operators-nvb7t" Oct 01 16:16:37 crc kubenswrapper[4869]: I1001 16:16:37.574567 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/12b56197-83b7-4f2e-a7f3-d3b304b7c2fa-catalog-content\") pod \"certified-operators-nvb7t\" (UID: \"12b56197-83b7-4f2e-a7f3-d3b304b7c2fa\") " pod="openshift-marketplace/certified-operators-nvb7t" Oct 01 16:16:37 crc kubenswrapper[4869]: I1001 16:16:37.574703 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/12b56197-83b7-4f2e-a7f3-d3b304b7c2fa-utilities\") pod \"certified-operators-nvb7t\" (UID: \"12b56197-83b7-4f2e-a7f3-d3b304b7c2fa\") " pod="openshift-marketplace/certified-operators-nvb7t" Oct 01 16:16:37 crc kubenswrapper[4869]: I1001 16:16:37.596421 4869 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-x2ndm\" (UniqueName: \"kubernetes.io/projected/12b56197-83b7-4f2e-a7f3-d3b304b7c2fa-kube-api-access-x2ndm\") pod \"certified-operators-nvb7t\" (UID: \"12b56197-83b7-4f2e-a7f3-d3b304b7c2fa\") " pod="openshift-marketplace/certified-operators-nvb7t" Oct 01 16:16:37 crc kubenswrapper[4869]: I1001 16:16:37.703990 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-nvb7t" Oct 01 16:16:38 crc kubenswrapper[4869]: I1001 16:16:38.340580 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-nvb7t"] Oct 01 16:16:39 crc kubenswrapper[4869]: I1001 16:16:39.405003 4869 generic.go:334] "Generic (PLEG): container finished" podID="12b56197-83b7-4f2e-a7f3-d3b304b7c2fa" containerID="8c4001960297f62e059d9f976ab01c37a02a9be2206354cdc4c54369bb530e9c" exitCode=0 Oct 01 16:16:39 crc kubenswrapper[4869]: I1001 16:16:39.405232 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nvb7t" event={"ID":"12b56197-83b7-4f2e-a7f3-d3b304b7c2fa","Type":"ContainerDied","Data":"8c4001960297f62e059d9f976ab01c37a02a9be2206354cdc4c54369bb530e9c"} Oct 01 16:16:39 crc kubenswrapper[4869]: I1001 16:16:39.405348 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nvb7t" event={"ID":"12b56197-83b7-4f2e-a7f3-d3b304b7c2fa","Type":"ContainerStarted","Data":"3d6cfa5037f813a0ad5b8a8eeaed8c1c7a92ca97ab8486ff584c92fbd2e5953a"} Oct 01 16:16:40 crc kubenswrapper[4869]: I1001 16:16:40.582501 4869 scope.go:117] "RemoveContainer" containerID="f08ce71040f6817aa52b8a625d6cc8a13d74732e306c081f0fd134fb173f09d7" Oct 01 16:16:40 crc kubenswrapper[4869]: E1001 16:16:40.583400 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:16:45 crc kubenswrapper[4869]: I1001 16:16:45.507448 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nvb7t" event={"ID":"12b56197-83b7-4f2e-a7f3-d3b304b7c2fa","Type":"ContainerStarted","Data":"a2ff5d6e58db8c3bdf5184e6da86b34e466fca1236c0130ab7563cb8560a21d7"} Oct 01 16:16:46 crc kubenswrapper[4869]: I1001 16:16:46.490687 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-d5k88"] Oct 01 16:16:46 crc kubenswrapper[4869]: I1001 16:16:46.498411 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-d5k88" Oct 01 16:16:46 crc kubenswrapper[4869]: I1001 16:16:46.508837 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-d5k88"] Oct 01 16:16:46 crc kubenswrapper[4869]: I1001 16:16:46.527932 4869 generic.go:334] "Generic (PLEG): container finished" podID="12b56197-83b7-4f2e-a7f3-d3b304b7c2fa" containerID="a2ff5d6e58db8c3bdf5184e6da86b34e466fca1236c0130ab7563cb8560a21d7" exitCode=0 Oct 01 16:16:46 crc kubenswrapper[4869]: I1001 16:16:46.528038 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nvb7t" event={"ID":"12b56197-83b7-4f2e-a7f3-d3b304b7c2fa","Type":"ContainerDied","Data":"a2ff5d6e58db8c3bdf5184e6da86b34e466fca1236c0130ab7563cb8560a21d7"} Oct 01 16:16:46 crc kubenswrapper[4869]: I1001 16:16:46.555853 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vw5t7\" (UniqueName: \"kubernetes.io/projected/0fc8293b-c0da-4104-b8c6-6ec2a892de46-kube-api-access-vw5t7\") pod \"redhat-marketplace-d5k88\" (UID: \"0fc8293b-c0da-4104-b8c6-6ec2a892de46\") " pod="openshift-marketplace/redhat-marketplace-d5k88" Oct 01 16:16:46 crc kubenswrapper[4869]: I1001 16:16:46.555911 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0fc8293b-c0da-4104-b8c6-6ec2a892de46-catalog-content\") pod \"redhat-marketplace-d5k88\" (UID: \"0fc8293b-c0da-4104-b8c6-6ec2a892de46\") " pod="openshift-marketplace/redhat-marketplace-d5k88" Oct 01 16:16:46 crc kubenswrapper[4869]: I1001 16:16:46.556088 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0fc8293b-c0da-4104-b8c6-6ec2a892de46-utilities\") pod \"redhat-marketplace-d5k88\" (UID: \"0fc8293b-c0da-4104-b8c6-6ec2a892de46\") " pod="openshift-marketplace/redhat-marketplace-d5k88" Oct 01 16:16:46 crc kubenswrapper[4869]: I1001 16:16:46.657316 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0fc8293b-c0da-4104-b8c6-6ec2a892de46-utilities\") pod \"redhat-marketplace-d5k88\" (UID: \"0fc8293b-c0da-4104-b8c6-6ec2a892de46\") " pod="openshift-marketplace/redhat-marketplace-d5k88" Oct 01 16:16:46 crc kubenswrapper[4869]: I1001 16:16:46.657461 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vw5t7\" (UniqueName: \"kubernetes.io/projected/0fc8293b-c0da-4104-b8c6-6ec2a892de46-kube-api-access-vw5t7\") pod \"redhat-marketplace-d5k88\" (UID: \"0fc8293b-c0da-4104-b8c6-6ec2a892de46\") " pod="openshift-marketplace/redhat-marketplace-d5k88" Oct 01 16:16:46 crc kubenswrapper[4869]: I1001 16:16:46.657492 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0fc8293b-c0da-4104-b8c6-6ec2a892de46-catalog-content\") pod \"redhat-marketplace-d5k88\" (UID: \"0fc8293b-c0da-4104-b8c6-6ec2a892de46\") " pod="openshift-marketplace/redhat-marketplace-d5k88" Oct 01 16:16:46 crc kubenswrapper[4869]: I1001 16:16:46.657981 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0fc8293b-c0da-4104-b8c6-6ec2a892de46-utilities\") pod \"redhat-marketplace-d5k88\" (UID: 
\"0fc8293b-c0da-4104-b8c6-6ec2a892de46\") " pod="openshift-marketplace/redhat-marketplace-d5k88" Oct 01 16:16:46 crc kubenswrapper[4869]: I1001 16:16:46.658104 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0fc8293b-c0da-4104-b8c6-6ec2a892de46-catalog-content\") pod \"redhat-marketplace-d5k88\" (UID: \"0fc8293b-c0da-4104-b8c6-6ec2a892de46\") " pod="openshift-marketplace/redhat-marketplace-d5k88" Oct 01 16:16:46 crc kubenswrapper[4869]: I1001 16:16:46.679077 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vw5t7\" (UniqueName: \"kubernetes.io/projected/0fc8293b-c0da-4104-b8c6-6ec2a892de46-kube-api-access-vw5t7\") pod \"redhat-marketplace-d5k88\" (UID: \"0fc8293b-c0da-4104-b8c6-6ec2a892de46\") " pod="openshift-marketplace/redhat-marketplace-d5k88" Oct 01 16:16:46 crc kubenswrapper[4869]: I1001 16:16:46.830645 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-d5k88" Oct 01 16:16:47 crc kubenswrapper[4869]: I1001 16:16:47.304407 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-d5k88"] Oct 01 16:16:47 crc kubenswrapper[4869]: W1001 16:16:47.306974 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0fc8293b_c0da_4104_b8c6_6ec2a892de46.slice/crio-0c765d47c9ebf129a332579e917859bb49bac289d5f064caf2cf4afdea7e8aec WatchSource:0}: Error finding container 0c765d47c9ebf129a332579e917859bb49bac289d5f064caf2cf4afdea7e8aec: Status 404 returned error can't find the container with id 0c765d47c9ebf129a332579e917859bb49bac289d5f064caf2cf4afdea7e8aec Oct 01 16:16:47 crc kubenswrapper[4869]: I1001 16:16:47.538885 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-d5k88" event={"ID":"0fc8293b-c0da-4104-b8c6-6ec2a892de46","Type":"ContainerStarted","Data":"0c765d47c9ebf129a332579e917859bb49bac289d5f064caf2cf4afdea7e8aec"} Oct 01 16:16:48 crc kubenswrapper[4869]: I1001 16:16:48.551065 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nvb7t" event={"ID":"12b56197-83b7-4f2e-a7f3-d3b304b7c2fa","Type":"ContainerStarted","Data":"a50546cd1cf6b5fae7fa2b886a2c5f3b55c3e6cec941dc42d0840275bb853e35"} Oct 01 16:16:48 crc kubenswrapper[4869]: I1001 16:16:48.553161 4869 generic.go:334] "Generic (PLEG): container finished" podID="0fc8293b-c0da-4104-b8c6-6ec2a892de46" containerID="8926f302a6a2131c0bba729c311da66de0b65b5b79dbdddfdca732f899fb4b68" exitCode=0 Oct 01 16:16:48 crc kubenswrapper[4869]: I1001 16:16:48.553216 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-d5k88" event={"ID":"0fc8293b-c0da-4104-b8c6-6ec2a892de46","Type":"ContainerDied","Data":"8926f302a6a2131c0bba729c311da66de0b65b5b79dbdddfdca732f899fb4b68"} Oct 01 16:16:48 crc kubenswrapper[4869]: I1001 16:16:48.587975 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-nvb7t" podStartSLOduration=3.6449475959999997 podStartE2EDuration="11.587945472s" podCreationTimestamp="2025-10-01 16:16:37 +0000 UTC" firstStartedPulling="2025-10-01 16:16:39.406829465 +0000 UTC m=+4308.553672581" lastFinishedPulling="2025-10-01 16:16:47.349827341 +0000 UTC m=+4316.496670457" observedRunningTime="2025-10-01 16:16:48.577609481 +0000 UTC 
m=+4317.724452607" watchObservedRunningTime="2025-10-01 16:16:48.587945472 +0000 UTC m=+4317.734788598" Oct 01 16:16:51 crc kubenswrapper[4869]: I1001 16:16:51.586045 4869 generic.go:334] "Generic (PLEG): container finished" podID="0fc8293b-c0da-4104-b8c6-6ec2a892de46" containerID="7ed7c32957dcd1f20bdb4c376cc29a1583e76ad604f6913568a4af2958303572" exitCode=0 Oct 01 16:16:51 crc kubenswrapper[4869]: I1001 16:16:51.599121 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-d5k88" event={"ID":"0fc8293b-c0da-4104-b8c6-6ec2a892de46","Type":"ContainerDied","Data":"7ed7c32957dcd1f20bdb4c376cc29a1583e76ad604f6913568a4af2958303572"} Oct 01 16:16:52 crc kubenswrapper[4869]: I1001 16:16:52.581761 4869 scope.go:117] "RemoveContainer" containerID="f08ce71040f6817aa52b8a625d6cc8a13d74732e306c081f0fd134fb173f09d7" Oct 01 16:16:52 crc kubenswrapper[4869]: E1001 16:16:52.582391 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:16:52 crc kubenswrapper[4869]: I1001 16:16:52.597842 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-d5k88" event={"ID":"0fc8293b-c0da-4104-b8c6-6ec2a892de46","Type":"ContainerStarted","Data":"613f37400c02dacb1fa10ac11bb24e0eb250fcad8e8535b93d175dbe1a3a0457"} Oct 01 16:16:52 crc kubenswrapper[4869]: I1001 16:16:52.617211 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-d5k88" podStartSLOduration=2.941249769 podStartE2EDuration="6.617188842s" podCreationTimestamp="2025-10-01 16:16:46 +0000 UTC" firstStartedPulling="2025-10-01 16:16:48.555157724 +0000 UTC m=+4317.702000840" lastFinishedPulling="2025-10-01 16:16:52.231096797 +0000 UTC m=+4321.377939913" observedRunningTime="2025-10-01 16:16:52.616482154 +0000 UTC m=+4321.763325280" watchObservedRunningTime="2025-10-01 16:16:52.617188842 +0000 UTC m=+4321.764031978" Oct 01 16:16:56 crc kubenswrapper[4869]: I1001 16:16:56.831224 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-d5k88" Oct 01 16:16:56 crc kubenswrapper[4869]: I1001 16:16:56.831661 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-d5k88" Oct 01 16:16:56 crc kubenswrapper[4869]: I1001 16:16:56.892679 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-d5k88" Oct 01 16:16:57 crc kubenswrapper[4869]: I1001 16:16:57.685226 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-d5k88" Oct 01 16:16:57 crc kubenswrapper[4869]: I1001 16:16:57.704374 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-nvb7t" Oct 01 16:16:57 crc kubenswrapper[4869]: I1001 16:16:57.704700 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-nvb7t" Oct 01 16:16:57 crc kubenswrapper[4869]: I1001 16:16:57.734451 4869 kubelet.go:2437] "SyncLoop DELETE" 
source="api" pods=["openshift-marketplace/redhat-marketplace-d5k88"] Oct 01 16:16:57 crc kubenswrapper[4869]: I1001 16:16:57.757788 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-nvb7t" Oct 01 16:16:58 crc kubenswrapper[4869]: I1001 16:16:58.711352 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-nvb7t" Oct 01 16:16:59 crc kubenswrapper[4869]: I1001 16:16:59.480005 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-nvb7t"] Oct 01 16:16:59 crc kubenswrapper[4869]: I1001 16:16:59.536697 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-r6jsz"] Oct 01 16:16:59 crc kubenswrapper[4869]: I1001 16:16:59.536983 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-r6jsz" podUID="fddf94b6-3137-4043-bbfd-28ae6650fb5a" containerName="registry-server" containerID="cri-o://ee266faccac44ec4083b108c3563d4fe990268f81fa9da85216b7c7d0a57459b" gracePeriod=2 Oct 01 16:16:59 crc kubenswrapper[4869]: I1001 16:16:59.652145 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-d5k88" podUID="0fc8293b-c0da-4104-b8c6-6ec2a892de46" containerName="registry-server" containerID="cri-o://613f37400c02dacb1fa10ac11bb24e0eb250fcad8e8535b93d175dbe1a3a0457" gracePeriod=2 Oct 01 16:17:00 crc kubenswrapper[4869]: E1001 16:17:00.348869 4869 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ee266faccac44ec4083b108c3563d4fe990268f81fa9da85216b7c7d0a57459b is running failed: container process not found" containerID="ee266faccac44ec4083b108c3563d4fe990268f81fa9da85216b7c7d0a57459b" cmd=["grpc_health_probe","-addr=:50051"] Oct 01 16:17:00 crc kubenswrapper[4869]: E1001 16:17:00.350992 4869 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ee266faccac44ec4083b108c3563d4fe990268f81fa9da85216b7c7d0a57459b is running failed: container process not found" containerID="ee266faccac44ec4083b108c3563d4fe990268f81fa9da85216b7c7d0a57459b" cmd=["grpc_health_probe","-addr=:50051"] Oct 01 16:17:00 crc kubenswrapper[4869]: E1001 16:17:00.351775 4869 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ee266faccac44ec4083b108c3563d4fe990268f81fa9da85216b7c7d0a57459b is running failed: container process not found" containerID="ee266faccac44ec4083b108c3563d4fe990268f81fa9da85216b7c7d0a57459b" cmd=["grpc_health_probe","-addr=:50051"] Oct 01 16:17:00 crc kubenswrapper[4869]: E1001 16:17:00.351821 4869 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ee266faccac44ec4083b108c3563d4fe990268f81fa9da85216b7c7d0a57459b is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/certified-operators-r6jsz" podUID="fddf94b6-3137-4043-bbfd-28ae6650fb5a" containerName="registry-server" Oct 01 16:17:00 crc kubenswrapper[4869]: I1001 16:17:00.687700 4869 generic.go:334] "Generic (PLEG): container finished" podID="fddf94b6-3137-4043-bbfd-28ae6650fb5a" 
containerID="ee266faccac44ec4083b108c3563d4fe990268f81fa9da85216b7c7d0a57459b" exitCode=0 Oct 01 16:17:00 crc kubenswrapper[4869]: I1001 16:17:00.687784 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-r6jsz" event={"ID":"fddf94b6-3137-4043-bbfd-28ae6650fb5a","Type":"ContainerDied","Data":"ee266faccac44ec4083b108c3563d4fe990268f81fa9da85216b7c7d0a57459b"} Oct 01 16:17:00 crc kubenswrapper[4869]: I1001 16:17:00.703973 4869 generic.go:334] "Generic (PLEG): container finished" podID="0fc8293b-c0da-4104-b8c6-6ec2a892de46" containerID="613f37400c02dacb1fa10ac11bb24e0eb250fcad8e8535b93d175dbe1a3a0457" exitCode=0 Oct 01 16:17:00 crc kubenswrapper[4869]: I1001 16:17:00.705410 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-d5k88" event={"ID":"0fc8293b-c0da-4104-b8c6-6ec2a892de46","Type":"ContainerDied","Data":"613f37400c02dacb1fa10ac11bb24e0eb250fcad8e8535b93d175dbe1a3a0457"} Oct 01 16:17:01 crc kubenswrapper[4869]: I1001 16:17:01.120249 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-r6jsz" Oct 01 16:17:01 crc kubenswrapper[4869]: I1001 16:17:01.199964 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fddf94b6-3137-4043-bbfd-28ae6650fb5a-catalog-content\") pod \"fddf94b6-3137-4043-bbfd-28ae6650fb5a\" (UID: \"fddf94b6-3137-4043-bbfd-28ae6650fb5a\") " Oct 01 16:17:01 crc kubenswrapper[4869]: I1001 16:17:01.200177 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fddf94b6-3137-4043-bbfd-28ae6650fb5a-utilities\") pod \"fddf94b6-3137-4043-bbfd-28ae6650fb5a\" (UID: \"fddf94b6-3137-4043-bbfd-28ae6650fb5a\") " Oct 01 16:17:01 crc kubenswrapper[4869]: I1001 16:17:01.200287 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fwmwm\" (UniqueName: \"kubernetes.io/projected/fddf94b6-3137-4043-bbfd-28ae6650fb5a-kube-api-access-fwmwm\") pod \"fddf94b6-3137-4043-bbfd-28ae6650fb5a\" (UID: \"fddf94b6-3137-4043-bbfd-28ae6650fb5a\") " Oct 01 16:17:01 crc kubenswrapper[4869]: I1001 16:17:01.210395 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fddf94b6-3137-4043-bbfd-28ae6650fb5a-utilities" (OuterVolumeSpecName: "utilities") pod "fddf94b6-3137-4043-bbfd-28ae6650fb5a" (UID: "fddf94b6-3137-4043-bbfd-28ae6650fb5a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 16:17:01 crc kubenswrapper[4869]: I1001 16:17:01.294626 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fddf94b6-3137-4043-bbfd-28ae6650fb5a-kube-api-access-fwmwm" (OuterVolumeSpecName: "kube-api-access-fwmwm") pod "fddf94b6-3137-4043-bbfd-28ae6650fb5a" (UID: "fddf94b6-3137-4043-bbfd-28ae6650fb5a"). InnerVolumeSpecName "kube-api-access-fwmwm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 16:17:01 crc kubenswrapper[4869]: I1001 16:17:01.303725 4869 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fddf94b6-3137-4043-bbfd-28ae6650fb5a-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 16:17:01 crc kubenswrapper[4869]: I1001 16:17:01.303761 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fwmwm\" (UniqueName: \"kubernetes.io/projected/fddf94b6-3137-4043-bbfd-28ae6650fb5a-kube-api-access-fwmwm\") on node \"crc\" DevicePath \"\"" Oct 01 16:17:01 crc kubenswrapper[4869]: I1001 16:17:01.379173 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fddf94b6-3137-4043-bbfd-28ae6650fb5a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "fddf94b6-3137-4043-bbfd-28ae6650fb5a" (UID: "fddf94b6-3137-4043-bbfd-28ae6650fb5a"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 16:17:01 crc kubenswrapper[4869]: I1001 16:17:01.405331 4869 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fddf94b6-3137-4043-bbfd-28ae6650fb5a-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 16:17:01 crc kubenswrapper[4869]: I1001 16:17:01.694139 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-d5k88" Oct 01 16:17:01 crc kubenswrapper[4869]: I1001 16:17:01.720339 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-r6jsz" event={"ID":"fddf94b6-3137-4043-bbfd-28ae6650fb5a","Type":"ContainerDied","Data":"9f4e2ebbf9e5dc59c589a9bd6af34811abca5d28a3ea49c020bf67a52d5ee2b9"} Oct 01 16:17:01 crc kubenswrapper[4869]: I1001 16:17:01.720394 4869 scope.go:117] "RemoveContainer" containerID="ee266faccac44ec4083b108c3563d4fe990268f81fa9da85216b7c7d0a57459b" Oct 01 16:17:01 crc kubenswrapper[4869]: I1001 16:17:01.720513 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-r6jsz" Oct 01 16:17:01 crc kubenswrapper[4869]: I1001 16:17:01.725724 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-d5k88" event={"ID":"0fc8293b-c0da-4104-b8c6-6ec2a892de46","Type":"ContainerDied","Data":"0c765d47c9ebf129a332579e917859bb49bac289d5f064caf2cf4afdea7e8aec"} Oct 01 16:17:01 crc kubenswrapper[4869]: I1001 16:17:01.725790 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-d5k88" Oct 01 16:17:01 crc kubenswrapper[4869]: I1001 16:17:01.746586 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-r6jsz"] Oct 01 16:17:01 crc kubenswrapper[4869]: I1001 16:17:01.765700 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-r6jsz"] Oct 01 16:17:01 crc kubenswrapper[4869]: I1001 16:17:01.802203 4869 scope.go:117] "RemoveContainer" containerID="6da13ad1750c234d333b71cb38d43ba4b5c612be7480373bd8cf9b38d2fb4fa1" Oct 01 16:17:01 crc kubenswrapper[4869]: I1001 16:17:01.812958 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0fc8293b-c0da-4104-b8c6-6ec2a892de46-utilities\") pod \"0fc8293b-c0da-4104-b8c6-6ec2a892de46\" (UID: \"0fc8293b-c0da-4104-b8c6-6ec2a892de46\") " Oct 01 16:17:01 crc kubenswrapper[4869]: I1001 16:17:01.814034 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0fc8293b-c0da-4104-b8c6-6ec2a892de46-utilities" (OuterVolumeSpecName: "utilities") pod "0fc8293b-c0da-4104-b8c6-6ec2a892de46" (UID: "0fc8293b-c0da-4104-b8c6-6ec2a892de46"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 16:17:01 crc kubenswrapper[4869]: I1001 16:17:01.814109 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vw5t7\" (UniqueName: \"kubernetes.io/projected/0fc8293b-c0da-4104-b8c6-6ec2a892de46-kube-api-access-vw5t7\") pod \"0fc8293b-c0da-4104-b8c6-6ec2a892de46\" (UID: \"0fc8293b-c0da-4104-b8c6-6ec2a892de46\") " Oct 01 16:17:01 crc kubenswrapper[4869]: I1001 16:17:01.814830 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0fc8293b-c0da-4104-b8c6-6ec2a892de46-catalog-content\") pod \"0fc8293b-c0da-4104-b8c6-6ec2a892de46\" (UID: \"0fc8293b-c0da-4104-b8c6-6ec2a892de46\") " Oct 01 16:17:01 crc kubenswrapper[4869]: I1001 16:17:01.815852 4869 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0fc8293b-c0da-4104-b8c6-6ec2a892de46-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 16:17:01 crc kubenswrapper[4869]: I1001 16:17:01.826532 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0fc8293b-c0da-4104-b8c6-6ec2a892de46-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0fc8293b-c0da-4104-b8c6-6ec2a892de46" (UID: "0fc8293b-c0da-4104-b8c6-6ec2a892de46"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 16:17:01 crc kubenswrapper[4869]: I1001 16:17:01.918975 4869 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0fc8293b-c0da-4104-b8c6-6ec2a892de46-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 16:17:02 crc kubenswrapper[4869]: I1001 16:17:02.260975 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0fc8293b-c0da-4104-b8c6-6ec2a892de46-kube-api-access-vw5t7" (OuterVolumeSpecName: "kube-api-access-vw5t7") pod "0fc8293b-c0da-4104-b8c6-6ec2a892de46" (UID: "0fc8293b-c0da-4104-b8c6-6ec2a892de46"). InnerVolumeSpecName "kube-api-access-vw5t7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 16:17:02 crc kubenswrapper[4869]: I1001 16:17:02.273179 4869 scope.go:117] "RemoveContainer" containerID="787980efe2dcf822fc4b37e382881a56c6d9c274f4b89c637b6d5db49ebce5c4" Oct 01 16:17:02 crc kubenswrapper[4869]: I1001 16:17:02.328180 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vw5t7\" (UniqueName: \"kubernetes.io/projected/0fc8293b-c0da-4104-b8c6-6ec2a892de46-kube-api-access-vw5t7\") on node \"crc\" DevicePath \"\"" Oct 01 16:17:02 crc kubenswrapper[4869]: I1001 16:17:02.418139 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-d5k88"] Oct 01 16:17:02 crc kubenswrapper[4869]: I1001 16:17:02.427437 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-d5k88"] Oct 01 16:17:02 crc kubenswrapper[4869]: I1001 16:17:02.462315 4869 scope.go:117] "RemoveContainer" containerID="613f37400c02dacb1fa10ac11bb24e0eb250fcad8e8535b93d175dbe1a3a0457" Oct 01 16:17:02 crc kubenswrapper[4869]: I1001 16:17:02.489887 4869 scope.go:117] "RemoveContainer" containerID="7ed7c32957dcd1f20bdb4c376cc29a1583e76ad604f6913568a4af2958303572" Oct 01 16:17:02 crc kubenswrapper[4869]: I1001 16:17:02.512685 4869 scope.go:117] "RemoveContainer" containerID="8926f302a6a2131c0bba729c311da66de0b65b5b79dbdddfdca732f899fb4b68" Oct 01 16:17:03 crc kubenswrapper[4869]: I1001 16:17:03.593459 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0fc8293b-c0da-4104-b8c6-6ec2a892de46" path="/var/lib/kubelet/pods/0fc8293b-c0da-4104-b8c6-6ec2a892de46/volumes" Oct 01 16:17:03 crc kubenswrapper[4869]: I1001 16:17:03.595382 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fddf94b6-3137-4043-bbfd-28ae6650fb5a" path="/var/lib/kubelet/pods/fddf94b6-3137-4043-bbfd-28ae6650fb5a/volumes" Oct 01 16:17:04 crc kubenswrapper[4869]: I1001 16:17:04.581729 4869 scope.go:117] "RemoveContainer" containerID="f08ce71040f6817aa52b8a625d6cc8a13d74732e306c081f0fd134fb173f09d7" Oct 01 16:17:04 crc kubenswrapper[4869]: E1001 16:17:04.582084 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:17:18 crc kubenswrapper[4869]: I1001 16:17:18.583059 4869 scope.go:117] "RemoveContainer" containerID="f08ce71040f6817aa52b8a625d6cc8a13d74732e306c081f0fd134fb173f09d7" Oct 01 16:17:18 crc kubenswrapper[4869]: E1001 16:17:18.584505 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:17:30 crc kubenswrapper[4869]: I1001 16:17:30.581514 4869 scope.go:117] "RemoveContainer" containerID="f08ce71040f6817aa52b8a625d6cc8a13d74732e306c081f0fd134fb173f09d7" Oct 01 16:17:30 crc kubenswrapper[4869]: E1001 16:17:30.582242 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to 
\"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:17:41 crc kubenswrapper[4869]: I1001 16:17:41.589614 4869 scope.go:117] "RemoveContainer" containerID="f08ce71040f6817aa52b8a625d6cc8a13d74732e306c081f0fd134fb173f09d7" Oct 01 16:17:41 crc kubenswrapper[4869]: E1001 16:17:41.590451 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:17:53 crc kubenswrapper[4869]: I1001 16:17:53.581493 4869 scope.go:117] "RemoveContainer" containerID="f08ce71040f6817aa52b8a625d6cc8a13d74732e306c081f0fd134fb173f09d7" Oct 01 16:17:53 crc kubenswrapper[4869]: E1001 16:17:53.583471 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:18:04 crc kubenswrapper[4869]: I1001 16:18:04.581229 4869 scope.go:117] "RemoveContainer" containerID="f08ce71040f6817aa52b8a625d6cc8a13d74732e306c081f0fd134fb173f09d7" Oct 01 16:18:04 crc kubenswrapper[4869]: E1001 16:18:04.582243 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:18:15 crc kubenswrapper[4869]: I1001 16:18:15.581202 4869 scope.go:117] "RemoveContainer" containerID="f08ce71040f6817aa52b8a625d6cc8a13d74732e306c081f0fd134fb173f09d7" Oct 01 16:18:15 crc kubenswrapper[4869]: E1001 16:18:15.582075 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:18:27 crc kubenswrapper[4869]: I1001 16:18:27.582023 4869 scope.go:117] "RemoveContainer" containerID="f08ce71040f6817aa52b8a625d6cc8a13d74732e306c081f0fd134fb173f09d7" Oct 01 16:18:27 crc kubenswrapper[4869]: E1001 16:18:27.583071 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:18:39 crc kubenswrapper[4869]: I1001 16:18:39.581060 4869 scope.go:117] "RemoveContainer" containerID="f08ce71040f6817aa52b8a625d6cc8a13d74732e306c081f0fd134fb173f09d7" Oct 01 16:18:39 crc kubenswrapper[4869]: E1001 16:18:39.581904 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:18:53 crc kubenswrapper[4869]: I1001 16:18:53.581436 4869 scope.go:117] "RemoveContainer" containerID="f08ce71040f6817aa52b8a625d6cc8a13d74732e306c081f0fd134fb173f09d7" Oct 01 16:18:53 crc kubenswrapper[4869]: E1001 16:18:53.582340 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:19:04 crc kubenswrapper[4869]: I1001 16:19:04.581620 4869 scope.go:117] "RemoveContainer" containerID="f08ce71040f6817aa52b8a625d6cc8a13d74732e306c081f0fd134fb173f09d7" Oct 01 16:19:04 crc kubenswrapper[4869]: E1001 16:19:04.582734 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:19:16 crc kubenswrapper[4869]: I1001 16:19:16.564934 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-ch9h4"] Oct 01 16:19:16 crc kubenswrapper[4869]: E1001 16:19:16.565712 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0fc8293b-c0da-4104-b8c6-6ec2a892de46" containerName="extract-utilities" Oct 01 16:19:16 crc kubenswrapper[4869]: I1001 16:19:16.565733 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="0fc8293b-c0da-4104-b8c6-6ec2a892de46" containerName="extract-utilities" Oct 01 16:19:16 crc kubenswrapper[4869]: E1001 16:19:16.565757 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fddf94b6-3137-4043-bbfd-28ae6650fb5a" containerName="extract-utilities" Oct 01 16:19:16 crc kubenswrapper[4869]: I1001 16:19:16.565762 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="fddf94b6-3137-4043-bbfd-28ae6650fb5a" containerName="extract-utilities" Oct 01 16:19:16 crc kubenswrapper[4869]: E1001 16:19:16.565778 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0fc8293b-c0da-4104-b8c6-6ec2a892de46" containerName="registry-server" Oct 01 16:19:16 crc kubenswrapper[4869]: I1001 16:19:16.565784 4869 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="0fc8293b-c0da-4104-b8c6-6ec2a892de46" containerName="registry-server" Oct 01 16:19:16 crc kubenswrapper[4869]: E1001 16:19:16.565804 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fddf94b6-3137-4043-bbfd-28ae6650fb5a" containerName="extract-content" Oct 01 16:19:16 crc kubenswrapper[4869]: I1001 16:19:16.565811 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="fddf94b6-3137-4043-bbfd-28ae6650fb5a" containerName="extract-content" Oct 01 16:19:16 crc kubenswrapper[4869]: E1001 16:19:16.565827 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fddf94b6-3137-4043-bbfd-28ae6650fb5a" containerName="registry-server" Oct 01 16:19:16 crc kubenswrapper[4869]: I1001 16:19:16.565832 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="fddf94b6-3137-4043-bbfd-28ae6650fb5a" containerName="registry-server" Oct 01 16:19:16 crc kubenswrapper[4869]: E1001 16:19:16.565842 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0fc8293b-c0da-4104-b8c6-6ec2a892de46" containerName="extract-content" Oct 01 16:19:16 crc kubenswrapper[4869]: I1001 16:19:16.565848 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="0fc8293b-c0da-4104-b8c6-6ec2a892de46" containerName="extract-content" Oct 01 16:19:16 crc kubenswrapper[4869]: I1001 16:19:16.566025 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="fddf94b6-3137-4043-bbfd-28ae6650fb5a" containerName="registry-server" Oct 01 16:19:16 crc kubenswrapper[4869]: I1001 16:19:16.566040 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="0fc8293b-c0da-4104-b8c6-6ec2a892de46" containerName="registry-server" Oct 01 16:19:16 crc kubenswrapper[4869]: I1001 16:19:16.567272 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-ch9h4" Oct 01 16:19:16 crc kubenswrapper[4869]: I1001 16:19:16.578662 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-ch9h4"] Oct 01 16:19:16 crc kubenswrapper[4869]: I1001 16:19:16.708695 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bd424bf2-a6f2-4858-92c4-d3f1427a938f-utilities\") pod \"community-operators-ch9h4\" (UID: \"bd424bf2-a6f2-4858-92c4-d3f1427a938f\") " pod="openshift-marketplace/community-operators-ch9h4" Oct 01 16:19:16 crc kubenswrapper[4869]: I1001 16:19:16.708746 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bd424bf2-a6f2-4858-92c4-d3f1427a938f-catalog-content\") pod \"community-operators-ch9h4\" (UID: \"bd424bf2-a6f2-4858-92c4-d3f1427a938f\") " pod="openshift-marketplace/community-operators-ch9h4" Oct 01 16:19:16 crc kubenswrapper[4869]: I1001 16:19:16.708902 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p6tbm\" (UniqueName: \"kubernetes.io/projected/bd424bf2-a6f2-4858-92c4-d3f1427a938f-kube-api-access-p6tbm\") pod \"community-operators-ch9h4\" (UID: \"bd424bf2-a6f2-4858-92c4-d3f1427a938f\") " pod="openshift-marketplace/community-operators-ch9h4" Oct 01 16:19:16 crc kubenswrapper[4869]: I1001 16:19:16.812150 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bd424bf2-a6f2-4858-92c4-d3f1427a938f-utilities\") pod \"community-operators-ch9h4\" (UID: \"bd424bf2-a6f2-4858-92c4-d3f1427a938f\") " pod="openshift-marketplace/community-operators-ch9h4" Oct 01 16:19:16 crc kubenswrapper[4869]: I1001 16:19:16.812492 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bd424bf2-a6f2-4858-92c4-d3f1427a938f-catalog-content\") pod \"community-operators-ch9h4\" (UID: \"bd424bf2-a6f2-4858-92c4-d3f1427a938f\") " pod="openshift-marketplace/community-operators-ch9h4" Oct 01 16:19:16 crc kubenswrapper[4869]: I1001 16:19:16.812646 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p6tbm\" (UniqueName: \"kubernetes.io/projected/bd424bf2-a6f2-4858-92c4-d3f1427a938f-kube-api-access-p6tbm\") pod \"community-operators-ch9h4\" (UID: \"bd424bf2-a6f2-4858-92c4-d3f1427a938f\") " pod="openshift-marketplace/community-operators-ch9h4" Oct 01 16:19:16 crc kubenswrapper[4869]: I1001 16:19:16.812764 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bd424bf2-a6f2-4858-92c4-d3f1427a938f-utilities\") pod \"community-operators-ch9h4\" (UID: \"bd424bf2-a6f2-4858-92c4-d3f1427a938f\") " pod="openshift-marketplace/community-operators-ch9h4" Oct 01 16:19:16 crc kubenswrapper[4869]: I1001 16:19:16.812813 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bd424bf2-a6f2-4858-92c4-d3f1427a938f-catalog-content\") pod \"community-operators-ch9h4\" (UID: \"bd424bf2-a6f2-4858-92c4-d3f1427a938f\") " pod="openshift-marketplace/community-operators-ch9h4" Oct 01 16:19:16 crc kubenswrapper[4869]: I1001 16:19:16.833996 4869 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-p6tbm\" (UniqueName: \"kubernetes.io/projected/bd424bf2-a6f2-4858-92c4-d3f1427a938f-kube-api-access-p6tbm\") pod \"community-operators-ch9h4\" (UID: \"bd424bf2-a6f2-4858-92c4-d3f1427a938f\") " pod="openshift-marketplace/community-operators-ch9h4" Oct 01 16:19:16 crc kubenswrapper[4869]: I1001 16:19:16.890176 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-ch9h4" Oct 01 16:19:17 crc kubenswrapper[4869]: I1001 16:19:17.512991 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-ch9h4"] Oct 01 16:19:18 crc kubenswrapper[4869]: I1001 16:19:18.125243 4869 generic.go:334] "Generic (PLEG): container finished" podID="bd424bf2-a6f2-4858-92c4-d3f1427a938f" containerID="495b0c256bbf579587f6350a5205e7b2d0e3bc8c1aa1b5508d612326c754c801" exitCode=0 Oct 01 16:19:18 crc kubenswrapper[4869]: I1001 16:19:18.125355 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ch9h4" event={"ID":"bd424bf2-a6f2-4858-92c4-d3f1427a938f","Type":"ContainerDied","Data":"495b0c256bbf579587f6350a5205e7b2d0e3bc8c1aa1b5508d612326c754c801"} Oct 01 16:19:18 crc kubenswrapper[4869]: I1001 16:19:18.125534 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ch9h4" event={"ID":"bd424bf2-a6f2-4858-92c4-d3f1427a938f","Type":"ContainerStarted","Data":"4b82f7f02835e0ab9b0362d0984db8e393ea3372eb3534d7a4396faf6dd3d754"} Oct 01 16:19:19 crc kubenswrapper[4869]: I1001 16:19:19.581471 4869 scope.go:117] "RemoveContainer" containerID="f08ce71040f6817aa52b8a625d6cc8a13d74732e306c081f0fd134fb173f09d7" Oct 01 16:19:19 crc kubenswrapper[4869]: E1001 16:19:19.582148 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:19:21 crc kubenswrapper[4869]: I1001 16:19:21.154486 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ch9h4" event={"ID":"bd424bf2-a6f2-4858-92c4-d3f1427a938f","Type":"ContainerStarted","Data":"184215068c80edf4b79cf7e1ffd55a942891eb001ab0cefa756cd740a0f5d921"} Oct 01 16:19:22 crc kubenswrapper[4869]: I1001 16:19:22.169652 4869 generic.go:334] "Generic (PLEG): container finished" podID="bd424bf2-a6f2-4858-92c4-d3f1427a938f" containerID="184215068c80edf4b79cf7e1ffd55a942891eb001ab0cefa756cd740a0f5d921" exitCode=0 Oct 01 16:19:22 crc kubenswrapper[4869]: I1001 16:19:22.169941 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ch9h4" event={"ID":"bd424bf2-a6f2-4858-92c4-d3f1427a938f","Type":"ContainerDied","Data":"184215068c80edf4b79cf7e1ffd55a942891eb001ab0cefa756cd740a0f5d921"} Oct 01 16:19:23 crc kubenswrapper[4869]: I1001 16:19:23.181556 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ch9h4" event={"ID":"bd424bf2-a6f2-4858-92c4-d3f1427a938f","Type":"ContainerStarted","Data":"be1cfcde990d53bbae12081a70b5448fe7be9f5951b020ed990ecc1610377b7b"} Oct 01 16:19:23 crc kubenswrapper[4869]: I1001 16:19:23.205349 4869 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-ch9h4" podStartSLOduration=2.771323599 podStartE2EDuration="7.205321355s" podCreationTimestamp="2025-10-01 16:19:16 +0000 UTC" firstStartedPulling="2025-10-01 16:19:18.128602606 +0000 UTC m=+4467.275445722" lastFinishedPulling="2025-10-01 16:19:22.562600362 +0000 UTC m=+4471.709443478" observedRunningTime="2025-10-01 16:19:23.198017751 +0000 UTC m=+4472.344860917" watchObservedRunningTime="2025-10-01 16:19:23.205321355 +0000 UTC m=+4472.352164491" Oct 01 16:19:26 crc kubenswrapper[4869]: I1001 16:19:26.891332 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-ch9h4" Oct 01 16:19:26 crc kubenswrapper[4869]: I1001 16:19:26.891793 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-ch9h4" Oct 01 16:19:26 crc kubenswrapper[4869]: I1001 16:19:26.940113 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-ch9h4" Oct 01 16:19:27 crc kubenswrapper[4869]: I1001 16:19:27.262426 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-ch9h4" Oct 01 16:19:27 crc kubenswrapper[4869]: I1001 16:19:27.307397 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-ch9h4"] Oct 01 16:19:29 crc kubenswrapper[4869]: I1001 16:19:29.229226 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-ch9h4" podUID="bd424bf2-a6f2-4858-92c4-d3f1427a938f" containerName="registry-server" containerID="cri-o://be1cfcde990d53bbae12081a70b5448fe7be9f5951b020ed990ecc1610377b7b" gracePeriod=2 Oct 01 16:19:29 crc kubenswrapper[4869]: I1001 16:19:29.911802 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-ch9h4" Oct 01 16:19:30 crc kubenswrapper[4869]: I1001 16:19:30.089889 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bd424bf2-a6f2-4858-92c4-d3f1427a938f-catalog-content\") pod \"bd424bf2-a6f2-4858-92c4-d3f1427a938f\" (UID: \"bd424bf2-a6f2-4858-92c4-d3f1427a938f\") " Oct 01 16:19:30 crc kubenswrapper[4869]: I1001 16:19:30.090394 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p6tbm\" (UniqueName: \"kubernetes.io/projected/bd424bf2-a6f2-4858-92c4-d3f1427a938f-kube-api-access-p6tbm\") pod \"bd424bf2-a6f2-4858-92c4-d3f1427a938f\" (UID: \"bd424bf2-a6f2-4858-92c4-d3f1427a938f\") " Oct 01 16:19:30 crc kubenswrapper[4869]: I1001 16:19:30.090655 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bd424bf2-a6f2-4858-92c4-d3f1427a938f-utilities\") pod \"bd424bf2-a6f2-4858-92c4-d3f1427a938f\" (UID: \"bd424bf2-a6f2-4858-92c4-d3f1427a938f\") " Oct 01 16:19:30 crc kubenswrapper[4869]: I1001 16:19:30.091305 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bd424bf2-a6f2-4858-92c4-d3f1427a938f-utilities" (OuterVolumeSpecName: "utilities") pod "bd424bf2-a6f2-4858-92c4-d3f1427a938f" (UID: "bd424bf2-a6f2-4858-92c4-d3f1427a938f"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 16:19:30 crc kubenswrapper[4869]: I1001 16:19:30.092227 4869 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bd424bf2-a6f2-4858-92c4-d3f1427a938f-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 16:19:30 crc kubenswrapper[4869]: I1001 16:19:30.097437 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd424bf2-a6f2-4858-92c4-d3f1427a938f-kube-api-access-p6tbm" (OuterVolumeSpecName: "kube-api-access-p6tbm") pod "bd424bf2-a6f2-4858-92c4-d3f1427a938f" (UID: "bd424bf2-a6f2-4858-92c4-d3f1427a938f"). InnerVolumeSpecName "kube-api-access-p6tbm". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 16:19:30 crc kubenswrapper[4869]: I1001 16:19:30.149469 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bd424bf2-a6f2-4858-92c4-d3f1427a938f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "bd424bf2-a6f2-4858-92c4-d3f1427a938f" (UID: "bd424bf2-a6f2-4858-92c4-d3f1427a938f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 16:19:30 crc kubenswrapper[4869]: I1001 16:19:30.193710 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p6tbm\" (UniqueName: \"kubernetes.io/projected/bd424bf2-a6f2-4858-92c4-d3f1427a938f-kube-api-access-p6tbm\") on node \"crc\" DevicePath \"\"" Oct 01 16:19:30 crc kubenswrapper[4869]: I1001 16:19:30.194021 4869 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bd424bf2-a6f2-4858-92c4-d3f1427a938f-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 16:19:30 crc kubenswrapper[4869]: I1001 16:19:30.237558 4869 generic.go:334] "Generic (PLEG): container finished" podID="bd424bf2-a6f2-4858-92c4-d3f1427a938f" containerID="be1cfcde990d53bbae12081a70b5448fe7be9f5951b020ed990ecc1610377b7b" exitCode=0 Oct 01 16:19:30 crc kubenswrapper[4869]: I1001 16:19:30.237593 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-ch9h4" Oct 01 16:19:30 crc kubenswrapper[4869]: I1001 16:19:30.237593 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ch9h4" event={"ID":"bd424bf2-a6f2-4858-92c4-d3f1427a938f","Type":"ContainerDied","Data":"be1cfcde990d53bbae12081a70b5448fe7be9f5951b020ed990ecc1610377b7b"} Oct 01 16:19:30 crc kubenswrapper[4869]: I1001 16:19:30.238670 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ch9h4" event={"ID":"bd424bf2-a6f2-4858-92c4-d3f1427a938f","Type":"ContainerDied","Data":"4b82f7f02835e0ab9b0362d0984db8e393ea3372eb3534d7a4396faf6dd3d754"} Oct 01 16:19:30 crc kubenswrapper[4869]: I1001 16:19:30.238779 4869 scope.go:117] "RemoveContainer" containerID="be1cfcde990d53bbae12081a70b5448fe7be9f5951b020ed990ecc1610377b7b" Oct 01 16:19:30 crc kubenswrapper[4869]: I1001 16:19:30.268454 4869 scope.go:117] "RemoveContainer" containerID="184215068c80edf4b79cf7e1ffd55a942891eb001ab0cefa756cd740a0f5d921" Oct 01 16:19:30 crc kubenswrapper[4869]: I1001 16:19:30.275064 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-ch9h4"] Oct 01 16:19:30 crc kubenswrapper[4869]: I1001 16:19:30.284533 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-ch9h4"] Oct 01 16:19:30 crc kubenswrapper[4869]: I1001 16:19:30.582117 4869 scope.go:117] "RemoveContainer" containerID="f08ce71040f6817aa52b8a625d6cc8a13d74732e306c081f0fd134fb173f09d7" Oct 01 16:19:30 crc kubenswrapper[4869]: E1001 16:19:30.582520 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:19:30 crc kubenswrapper[4869]: I1001 16:19:30.667021 4869 scope.go:117] "RemoveContainer" containerID="495b0c256bbf579587f6350a5205e7b2d0e3bc8c1aa1b5508d612326c754c801" Oct 01 16:19:30 crc kubenswrapper[4869]: I1001 16:19:30.719299 4869 scope.go:117] "RemoveContainer" containerID="be1cfcde990d53bbae12081a70b5448fe7be9f5951b020ed990ecc1610377b7b" Oct 01 16:19:30 crc kubenswrapper[4869]: E1001 16:19:30.719974 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"be1cfcde990d53bbae12081a70b5448fe7be9f5951b020ed990ecc1610377b7b\": container with ID starting with be1cfcde990d53bbae12081a70b5448fe7be9f5951b020ed990ecc1610377b7b not found: ID does not exist" containerID="be1cfcde990d53bbae12081a70b5448fe7be9f5951b020ed990ecc1610377b7b" Oct 01 16:19:30 crc kubenswrapper[4869]: I1001 16:19:30.720003 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"be1cfcde990d53bbae12081a70b5448fe7be9f5951b020ed990ecc1610377b7b"} err="failed to get container status \"be1cfcde990d53bbae12081a70b5448fe7be9f5951b020ed990ecc1610377b7b\": rpc error: code = NotFound desc = could not find container \"be1cfcde990d53bbae12081a70b5448fe7be9f5951b020ed990ecc1610377b7b\": container with ID starting with be1cfcde990d53bbae12081a70b5448fe7be9f5951b020ed990ecc1610377b7b not found: ID does not exist" Oct 01 16:19:30 crc kubenswrapper[4869]: 
I1001 16:19:30.720024 4869 scope.go:117] "RemoveContainer" containerID="184215068c80edf4b79cf7e1ffd55a942891eb001ab0cefa756cd740a0f5d921" Oct 01 16:19:30 crc kubenswrapper[4869]: E1001 16:19:30.720352 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"184215068c80edf4b79cf7e1ffd55a942891eb001ab0cefa756cd740a0f5d921\": container with ID starting with 184215068c80edf4b79cf7e1ffd55a942891eb001ab0cefa756cd740a0f5d921 not found: ID does not exist" containerID="184215068c80edf4b79cf7e1ffd55a942891eb001ab0cefa756cd740a0f5d921" Oct 01 16:19:30 crc kubenswrapper[4869]: I1001 16:19:30.720398 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"184215068c80edf4b79cf7e1ffd55a942891eb001ab0cefa756cd740a0f5d921"} err="failed to get container status \"184215068c80edf4b79cf7e1ffd55a942891eb001ab0cefa756cd740a0f5d921\": rpc error: code = NotFound desc = could not find container \"184215068c80edf4b79cf7e1ffd55a942891eb001ab0cefa756cd740a0f5d921\": container with ID starting with 184215068c80edf4b79cf7e1ffd55a942891eb001ab0cefa756cd740a0f5d921 not found: ID does not exist" Oct 01 16:19:30 crc kubenswrapper[4869]: I1001 16:19:30.720431 4869 scope.go:117] "RemoveContainer" containerID="495b0c256bbf579587f6350a5205e7b2d0e3bc8c1aa1b5508d612326c754c801" Oct 01 16:19:30 crc kubenswrapper[4869]: E1001 16:19:30.720711 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"495b0c256bbf579587f6350a5205e7b2d0e3bc8c1aa1b5508d612326c754c801\": container with ID starting with 495b0c256bbf579587f6350a5205e7b2d0e3bc8c1aa1b5508d612326c754c801 not found: ID does not exist" containerID="495b0c256bbf579587f6350a5205e7b2d0e3bc8c1aa1b5508d612326c754c801" Oct 01 16:19:30 crc kubenswrapper[4869]: I1001 16:19:30.720735 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"495b0c256bbf579587f6350a5205e7b2d0e3bc8c1aa1b5508d612326c754c801"} err="failed to get container status \"495b0c256bbf579587f6350a5205e7b2d0e3bc8c1aa1b5508d612326c754c801\": rpc error: code = NotFound desc = could not find container \"495b0c256bbf579587f6350a5205e7b2d0e3bc8c1aa1b5508d612326c754c801\": container with ID starting with 495b0c256bbf579587f6350a5205e7b2d0e3bc8c1aa1b5508d612326c754c801 not found: ID does not exist" Oct 01 16:19:31 crc kubenswrapper[4869]: I1001 16:19:31.593558 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd424bf2-a6f2-4858-92c4-d3f1427a938f" path="/var/lib/kubelet/pods/bd424bf2-a6f2-4858-92c4-d3f1427a938f/volumes" Oct 01 16:19:42 crc kubenswrapper[4869]: I1001 16:19:42.581875 4869 scope.go:117] "RemoveContainer" containerID="f08ce71040f6817aa52b8a625d6cc8a13d74732e306c081f0fd134fb173f09d7" Oct 01 16:19:42 crc kubenswrapper[4869]: E1001 16:19:42.582707 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:19:56 crc kubenswrapper[4869]: I1001 16:19:56.581732 4869 scope.go:117] "RemoveContainer" containerID="f08ce71040f6817aa52b8a625d6cc8a13d74732e306c081f0fd134fb173f09d7" Oct 01 16:19:56 crc 
kubenswrapper[4869]: E1001 16:19:56.582616 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:20:09 crc kubenswrapper[4869]: I1001 16:20:09.580629 4869 scope.go:117] "RemoveContainer" containerID="f08ce71040f6817aa52b8a625d6cc8a13d74732e306c081f0fd134fb173f09d7" Oct 01 16:20:09 crc kubenswrapper[4869]: E1001 16:20:09.581400 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:20:22 crc kubenswrapper[4869]: I1001 16:20:22.580842 4869 scope.go:117] "RemoveContainer" containerID="f08ce71040f6817aa52b8a625d6cc8a13d74732e306c081f0fd134fb173f09d7" Oct 01 16:20:23 crc kubenswrapper[4869]: I1001 16:20:23.721925 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" event={"ID":"a4b64f7f-0b03-4f47-965b-9fde048b735c","Type":"ContainerStarted","Data":"5e2d8e6198d2f74bc8fa7e68cea96d1e2800cee50b353247648e0d66925b176c"} Oct 01 16:22:43 crc kubenswrapper[4869]: I1001 16:22:43.354245 4869 patch_prober.go:28] interesting pod/machine-config-daemon-c86m8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 16:22:43 crc kubenswrapper[4869]: I1001 16:22:43.355317 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 16:23:13 crc kubenswrapper[4869]: I1001 16:23:13.354945 4869 patch_prober.go:28] interesting pod/machine-config-daemon-c86m8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 16:23:13 crc kubenswrapper[4869]: I1001 16:23:13.355507 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 16:23:43 crc kubenswrapper[4869]: I1001 16:23:43.354709 4869 patch_prober.go:28] interesting pod/machine-config-daemon-c86m8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 16:23:43 crc kubenswrapper[4869]: 
I1001 16:23:43.355191 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 16:23:43 crc kubenswrapper[4869]: I1001 16:23:43.355241 4869 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" Oct 01 16:23:43 crc kubenswrapper[4869]: I1001 16:23:43.356068 4869 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"5e2d8e6198d2f74bc8fa7e68cea96d1e2800cee50b353247648e0d66925b176c"} pod="openshift-machine-config-operator/machine-config-daemon-c86m8" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 01 16:23:43 crc kubenswrapper[4869]: I1001 16:23:43.356125 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" containerID="cri-o://5e2d8e6198d2f74bc8fa7e68cea96d1e2800cee50b353247648e0d66925b176c" gracePeriod=600 Oct 01 16:23:44 crc kubenswrapper[4869]: I1001 16:23:44.485166 4869 generic.go:334] "Generic (PLEG): container finished" podID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerID="5e2d8e6198d2f74bc8fa7e68cea96d1e2800cee50b353247648e0d66925b176c" exitCode=0 Oct 01 16:23:44 crc kubenswrapper[4869]: I1001 16:23:44.485212 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" event={"ID":"a4b64f7f-0b03-4f47-965b-9fde048b735c","Type":"ContainerDied","Data":"5e2d8e6198d2f74bc8fa7e68cea96d1e2800cee50b353247648e0d66925b176c"} Oct 01 16:23:44 crc kubenswrapper[4869]: I1001 16:23:44.485544 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" event={"ID":"a4b64f7f-0b03-4f47-965b-9fde048b735c","Type":"ContainerStarted","Data":"0eaa53cd5d569b0e6bef9edefc7d1fa07489655924f7f785aa79773af28d727f"} Oct 01 16:23:44 crc kubenswrapper[4869]: I1001 16:23:44.485572 4869 scope.go:117] "RemoveContainer" containerID="f08ce71040f6817aa52b8a625d6cc8a13d74732e306c081f0fd134fb173f09d7" Oct 01 16:26:13 crc kubenswrapper[4869]: I1001 16:26:13.354004 4869 patch_prober.go:28] interesting pod/machine-config-daemon-c86m8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 16:26:13 crc kubenswrapper[4869]: I1001 16:26:13.354611 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 16:26:43 crc kubenswrapper[4869]: I1001 16:26:43.354746 4869 patch_prober.go:28] interesting pod/machine-config-daemon-c86m8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: 
connect: connection refused" start-of-body= Oct 01 16:26:43 crc kubenswrapper[4869]: I1001 16:26:43.355338 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 16:26:53 crc kubenswrapper[4869]: I1001 16:26:53.654771 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-mf2dz"] Oct 01 16:26:53 crc kubenswrapper[4869]: E1001 16:26:53.655853 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd424bf2-a6f2-4858-92c4-d3f1427a938f" containerName="extract-content" Oct 01 16:26:53 crc kubenswrapper[4869]: I1001 16:26:53.655872 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd424bf2-a6f2-4858-92c4-d3f1427a938f" containerName="extract-content" Oct 01 16:26:53 crc kubenswrapper[4869]: E1001 16:26:53.655906 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd424bf2-a6f2-4858-92c4-d3f1427a938f" containerName="registry-server" Oct 01 16:26:53 crc kubenswrapper[4869]: I1001 16:26:53.655917 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd424bf2-a6f2-4858-92c4-d3f1427a938f" containerName="registry-server" Oct 01 16:26:53 crc kubenswrapper[4869]: E1001 16:26:53.655938 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd424bf2-a6f2-4858-92c4-d3f1427a938f" containerName="extract-utilities" Oct 01 16:26:53 crc kubenswrapper[4869]: I1001 16:26:53.655948 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd424bf2-a6f2-4858-92c4-d3f1427a938f" containerName="extract-utilities" Oct 01 16:26:53 crc kubenswrapper[4869]: I1001 16:26:53.656186 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="bd424bf2-a6f2-4858-92c4-d3f1427a938f" containerName="registry-server" Oct 01 16:26:53 crc kubenswrapper[4869]: I1001 16:26:53.659072 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mf2dz" Oct 01 16:26:53 crc kubenswrapper[4869]: I1001 16:26:53.673503 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-mf2dz"] Oct 01 16:26:53 crc kubenswrapper[4869]: I1001 16:26:53.698713 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/40f107b2-b2e7-4ef0-8566-1c3c1b7122f7-catalog-content\") pod \"redhat-marketplace-mf2dz\" (UID: \"40f107b2-b2e7-4ef0-8566-1c3c1b7122f7\") " pod="openshift-marketplace/redhat-marketplace-mf2dz" Oct 01 16:26:53 crc kubenswrapper[4869]: I1001 16:26:53.698770 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/40f107b2-b2e7-4ef0-8566-1c3c1b7122f7-utilities\") pod \"redhat-marketplace-mf2dz\" (UID: \"40f107b2-b2e7-4ef0-8566-1c3c1b7122f7\") " pod="openshift-marketplace/redhat-marketplace-mf2dz" Oct 01 16:26:53 crc kubenswrapper[4869]: I1001 16:26:53.698922 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zw68s\" (UniqueName: \"kubernetes.io/projected/40f107b2-b2e7-4ef0-8566-1c3c1b7122f7-kube-api-access-zw68s\") pod \"redhat-marketplace-mf2dz\" (UID: \"40f107b2-b2e7-4ef0-8566-1c3c1b7122f7\") " pod="openshift-marketplace/redhat-marketplace-mf2dz" Oct 01 16:26:53 crc kubenswrapper[4869]: I1001 16:26:53.800511 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zw68s\" (UniqueName: \"kubernetes.io/projected/40f107b2-b2e7-4ef0-8566-1c3c1b7122f7-kube-api-access-zw68s\") pod \"redhat-marketplace-mf2dz\" (UID: \"40f107b2-b2e7-4ef0-8566-1c3c1b7122f7\") " pod="openshift-marketplace/redhat-marketplace-mf2dz" Oct 01 16:26:53 crc kubenswrapper[4869]: I1001 16:26:53.801001 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/40f107b2-b2e7-4ef0-8566-1c3c1b7122f7-catalog-content\") pod \"redhat-marketplace-mf2dz\" (UID: \"40f107b2-b2e7-4ef0-8566-1c3c1b7122f7\") " pod="openshift-marketplace/redhat-marketplace-mf2dz" Oct 01 16:26:53 crc kubenswrapper[4869]: I1001 16:26:53.801022 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/40f107b2-b2e7-4ef0-8566-1c3c1b7122f7-utilities\") pod \"redhat-marketplace-mf2dz\" (UID: \"40f107b2-b2e7-4ef0-8566-1c3c1b7122f7\") " pod="openshift-marketplace/redhat-marketplace-mf2dz" Oct 01 16:26:53 crc kubenswrapper[4869]: I1001 16:26:53.801592 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/40f107b2-b2e7-4ef0-8566-1c3c1b7122f7-catalog-content\") pod \"redhat-marketplace-mf2dz\" (UID: \"40f107b2-b2e7-4ef0-8566-1c3c1b7122f7\") " pod="openshift-marketplace/redhat-marketplace-mf2dz" Oct 01 16:26:53 crc kubenswrapper[4869]: I1001 16:26:53.801612 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/40f107b2-b2e7-4ef0-8566-1c3c1b7122f7-utilities\") pod \"redhat-marketplace-mf2dz\" (UID: \"40f107b2-b2e7-4ef0-8566-1c3c1b7122f7\") " pod="openshift-marketplace/redhat-marketplace-mf2dz" Oct 01 16:26:53 crc kubenswrapper[4869]: I1001 16:26:53.820605 4869 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-zw68s\" (UniqueName: \"kubernetes.io/projected/40f107b2-b2e7-4ef0-8566-1c3c1b7122f7-kube-api-access-zw68s\") pod \"redhat-marketplace-mf2dz\" (UID: \"40f107b2-b2e7-4ef0-8566-1c3c1b7122f7\") " pod="openshift-marketplace/redhat-marketplace-mf2dz" Oct 01 16:26:53 crc kubenswrapper[4869]: I1001 16:26:53.990842 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mf2dz" Oct 01 16:26:54 crc kubenswrapper[4869]: I1001 16:26:54.481705 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-mf2dz"] Oct 01 16:26:55 crc kubenswrapper[4869]: I1001 16:26:55.220000 4869 generic.go:334] "Generic (PLEG): container finished" podID="40f107b2-b2e7-4ef0-8566-1c3c1b7122f7" containerID="6ab6019578963a7d7fd4280076a31cc989e13d26d895e283393bdced06f5e368" exitCode=0 Oct 01 16:26:55 crc kubenswrapper[4869]: I1001 16:26:55.220217 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mf2dz" event={"ID":"40f107b2-b2e7-4ef0-8566-1c3c1b7122f7","Type":"ContainerDied","Data":"6ab6019578963a7d7fd4280076a31cc989e13d26d895e283393bdced06f5e368"} Oct 01 16:26:55 crc kubenswrapper[4869]: I1001 16:26:55.220241 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mf2dz" event={"ID":"40f107b2-b2e7-4ef0-8566-1c3c1b7122f7","Type":"ContainerStarted","Data":"36bd1ad83cdfdec86e413e5477e35b53248a9c6d2f3ba33b59ebaa5dfa645aae"} Oct 01 16:26:55 crc kubenswrapper[4869]: I1001 16:26:55.222081 4869 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 01 16:26:57 crc kubenswrapper[4869]: I1001 16:26:57.241103 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mf2dz" event={"ID":"40f107b2-b2e7-4ef0-8566-1c3c1b7122f7","Type":"ContainerStarted","Data":"62c1f0344bf15a375e99af93bd96c39778e11d91b8ea95cb1e67ed9c6bf2ec89"} Oct 01 16:26:57 crc kubenswrapper[4869]: I1001 16:26:57.834559 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-xssmj"] Oct 01 16:26:57 crc kubenswrapper[4869]: I1001 16:26:57.836943 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-xssmj" Oct 01 16:26:57 crc kubenswrapper[4869]: I1001 16:26:57.848066 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-xssmj"] Oct 01 16:26:57 crc kubenswrapper[4869]: I1001 16:26:57.900731 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jwrlc\" (UniqueName: \"kubernetes.io/projected/4fb87c85-fa63-4813-a35c-4e9342046f8c-kube-api-access-jwrlc\") pod \"certified-operators-xssmj\" (UID: \"4fb87c85-fa63-4813-a35c-4e9342046f8c\") " pod="openshift-marketplace/certified-operators-xssmj" Oct 01 16:26:57 crc kubenswrapper[4869]: I1001 16:26:57.900912 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4fb87c85-fa63-4813-a35c-4e9342046f8c-utilities\") pod \"certified-operators-xssmj\" (UID: \"4fb87c85-fa63-4813-a35c-4e9342046f8c\") " pod="openshift-marketplace/certified-operators-xssmj" Oct 01 16:26:57 crc kubenswrapper[4869]: I1001 16:26:57.901151 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4fb87c85-fa63-4813-a35c-4e9342046f8c-catalog-content\") pod \"certified-operators-xssmj\" (UID: \"4fb87c85-fa63-4813-a35c-4e9342046f8c\") " pod="openshift-marketplace/certified-operators-xssmj" Oct 01 16:26:58 crc kubenswrapper[4869]: I1001 16:26:58.003293 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jwrlc\" (UniqueName: \"kubernetes.io/projected/4fb87c85-fa63-4813-a35c-4e9342046f8c-kube-api-access-jwrlc\") pod \"certified-operators-xssmj\" (UID: \"4fb87c85-fa63-4813-a35c-4e9342046f8c\") " pod="openshift-marketplace/certified-operators-xssmj" Oct 01 16:26:58 crc kubenswrapper[4869]: I1001 16:26:58.003402 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4fb87c85-fa63-4813-a35c-4e9342046f8c-utilities\") pod \"certified-operators-xssmj\" (UID: \"4fb87c85-fa63-4813-a35c-4e9342046f8c\") " pod="openshift-marketplace/certified-operators-xssmj" Oct 01 16:26:58 crc kubenswrapper[4869]: I1001 16:26:58.003502 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4fb87c85-fa63-4813-a35c-4e9342046f8c-catalog-content\") pod \"certified-operators-xssmj\" (UID: \"4fb87c85-fa63-4813-a35c-4e9342046f8c\") " pod="openshift-marketplace/certified-operators-xssmj" Oct 01 16:26:58 crc kubenswrapper[4869]: I1001 16:26:58.004096 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4fb87c85-fa63-4813-a35c-4e9342046f8c-utilities\") pod \"certified-operators-xssmj\" (UID: \"4fb87c85-fa63-4813-a35c-4e9342046f8c\") " pod="openshift-marketplace/certified-operators-xssmj" Oct 01 16:26:58 crc kubenswrapper[4869]: I1001 16:26:58.004164 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4fb87c85-fa63-4813-a35c-4e9342046f8c-catalog-content\") pod \"certified-operators-xssmj\" (UID: \"4fb87c85-fa63-4813-a35c-4e9342046f8c\") " pod="openshift-marketplace/certified-operators-xssmj" Oct 01 16:26:58 crc kubenswrapper[4869]: I1001 16:26:58.035149 4869 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-jwrlc\" (UniqueName: \"kubernetes.io/projected/4fb87c85-fa63-4813-a35c-4e9342046f8c-kube-api-access-jwrlc\") pod \"certified-operators-xssmj\" (UID: \"4fb87c85-fa63-4813-a35c-4e9342046f8c\") " pod="openshift-marketplace/certified-operators-xssmj" Oct 01 16:26:58 crc kubenswrapper[4869]: I1001 16:26:58.161315 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-xssmj" Oct 01 16:26:58 crc kubenswrapper[4869]: I1001 16:26:58.274915 4869 generic.go:334] "Generic (PLEG): container finished" podID="40f107b2-b2e7-4ef0-8566-1c3c1b7122f7" containerID="62c1f0344bf15a375e99af93bd96c39778e11d91b8ea95cb1e67ed9c6bf2ec89" exitCode=0 Oct 01 16:26:58 crc kubenswrapper[4869]: I1001 16:26:58.275284 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mf2dz" event={"ID":"40f107b2-b2e7-4ef0-8566-1c3c1b7122f7","Type":"ContainerDied","Data":"62c1f0344bf15a375e99af93bd96c39778e11d91b8ea95cb1e67ed9c6bf2ec89"} Oct 01 16:26:58 crc kubenswrapper[4869]: I1001 16:26:58.721863 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-xssmj"] Oct 01 16:26:59 crc kubenswrapper[4869]: I1001 16:26:59.284818 4869 generic.go:334] "Generic (PLEG): container finished" podID="4fb87c85-fa63-4813-a35c-4e9342046f8c" containerID="a92df9940d25a7ffb845706489896c4b78cfe497fc59fd385769f303ce39961d" exitCode=0 Oct 01 16:26:59 crc kubenswrapper[4869]: I1001 16:26:59.284922 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xssmj" event={"ID":"4fb87c85-fa63-4813-a35c-4e9342046f8c","Type":"ContainerDied","Data":"a92df9940d25a7ffb845706489896c4b78cfe497fc59fd385769f303ce39961d"} Oct 01 16:26:59 crc kubenswrapper[4869]: I1001 16:26:59.285545 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xssmj" event={"ID":"4fb87c85-fa63-4813-a35c-4e9342046f8c","Type":"ContainerStarted","Data":"f3706d11c5bca8d4bc8d01220186ad4a00458b9bfdcc33a2dfd2f139defd4b0d"} Oct 01 16:26:59 crc kubenswrapper[4869]: I1001 16:26:59.288362 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mf2dz" event={"ID":"40f107b2-b2e7-4ef0-8566-1c3c1b7122f7","Type":"ContainerStarted","Data":"c312189b7f88861db70bb4ff72f21a37840626104cf414596eb65c64222dc8ef"} Oct 01 16:26:59 crc kubenswrapper[4869]: I1001 16:26:59.322867 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-mf2dz" podStartSLOduration=2.637890092 podStartE2EDuration="6.322841993s" podCreationTimestamp="2025-10-01 16:26:53 +0000 UTC" firstStartedPulling="2025-10-01 16:26:55.221822948 +0000 UTC m=+4924.368666064" lastFinishedPulling="2025-10-01 16:26:58.906774849 +0000 UTC m=+4928.053617965" observedRunningTime="2025-10-01 16:26:59.317365235 +0000 UTC m=+4928.464208361" watchObservedRunningTime="2025-10-01 16:26:59.322841993 +0000 UTC m=+4928.469685119" Oct 01 16:27:00 crc kubenswrapper[4869]: I1001 16:27:00.299837 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xssmj" event={"ID":"4fb87c85-fa63-4813-a35c-4e9342046f8c","Type":"ContainerStarted","Data":"781746f3e65c8ba428d64dc976d959a0d075f07faeb9093b2e24c0979ef89b18"} Oct 01 16:27:01 crc kubenswrapper[4869]: I1001 16:27:01.312842 4869 generic.go:334] "Generic (PLEG): container finished" 
podID="4fb87c85-fa63-4813-a35c-4e9342046f8c" containerID="781746f3e65c8ba428d64dc976d959a0d075f07faeb9093b2e24c0979ef89b18" exitCode=0 Oct 01 16:27:01 crc kubenswrapper[4869]: I1001 16:27:01.313024 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xssmj" event={"ID":"4fb87c85-fa63-4813-a35c-4e9342046f8c","Type":"ContainerDied","Data":"781746f3e65c8ba428d64dc976d959a0d075f07faeb9093b2e24c0979ef89b18"} Oct 01 16:27:03 crc kubenswrapper[4869]: I1001 16:27:03.991762 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-mf2dz" Oct 01 16:27:03 crc kubenswrapper[4869]: I1001 16:27:03.992333 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-mf2dz" Oct 01 16:27:04 crc kubenswrapper[4869]: I1001 16:27:04.052650 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-mf2dz" Oct 01 16:27:04 crc kubenswrapper[4869]: I1001 16:27:04.343383 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xssmj" event={"ID":"4fb87c85-fa63-4813-a35c-4e9342046f8c","Type":"ContainerStarted","Data":"7f025e93ec3a102d6cbbb29a66742a7285cd215e4f2579d4295238c51b1d0e96"} Oct 01 16:27:04 crc kubenswrapper[4869]: I1001 16:27:04.366691 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-xssmj" podStartSLOduration=3.441957899 podStartE2EDuration="7.36664052s" podCreationTimestamp="2025-10-01 16:26:57 +0000 UTC" firstStartedPulling="2025-10-01 16:26:59.288235387 +0000 UTC m=+4928.435078503" lastFinishedPulling="2025-10-01 16:27:03.212918008 +0000 UTC m=+4932.359761124" observedRunningTime="2025-10-01 16:27:04.365171532 +0000 UTC m=+4933.512014688" watchObservedRunningTime="2025-10-01 16:27:04.36664052 +0000 UTC m=+4933.513483646" Oct 01 16:27:04 crc kubenswrapper[4869]: I1001 16:27:04.395648 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-mf2dz" Oct 01 16:27:06 crc kubenswrapper[4869]: I1001 16:27:06.219097 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-mf2dz"] Oct 01 16:27:06 crc kubenswrapper[4869]: I1001 16:27:06.360771 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-mf2dz" podUID="40f107b2-b2e7-4ef0-8566-1c3c1b7122f7" containerName="registry-server" containerID="cri-o://c312189b7f88861db70bb4ff72f21a37840626104cf414596eb65c64222dc8ef" gracePeriod=2 Oct 01 16:27:07 crc kubenswrapper[4869]: I1001 16:27:07.372106 4869 generic.go:334] "Generic (PLEG): container finished" podID="40f107b2-b2e7-4ef0-8566-1c3c1b7122f7" containerID="c312189b7f88861db70bb4ff72f21a37840626104cf414596eb65c64222dc8ef" exitCode=0 Oct 01 16:27:07 crc kubenswrapper[4869]: I1001 16:27:07.372225 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mf2dz" event={"ID":"40f107b2-b2e7-4ef0-8566-1c3c1b7122f7","Type":"ContainerDied","Data":"c312189b7f88861db70bb4ff72f21a37840626104cf414596eb65c64222dc8ef"} Oct 01 16:27:07 crc kubenswrapper[4869]: I1001 16:27:07.885065 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mf2dz" Oct 01 16:27:07 crc kubenswrapper[4869]: I1001 16:27:07.913485 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/40f107b2-b2e7-4ef0-8566-1c3c1b7122f7-utilities\") pod \"40f107b2-b2e7-4ef0-8566-1c3c1b7122f7\" (UID: \"40f107b2-b2e7-4ef0-8566-1c3c1b7122f7\") " Oct 01 16:27:07 crc kubenswrapper[4869]: I1001 16:27:07.913655 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zw68s\" (UniqueName: \"kubernetes.io/projected/40f107b2-b2e7-4ef0-8566-1c3c1b7122f7-kube-api-access-zw68s\") pod \"40f107b2-b2e7-4ef0-8566-1c3c1b7122f7\" (UID: \"40f107b2-b2e7-4ef0-8566-1c3c1b7122f7\") " Oct 01 16:27:07 crc kubenswrapper[4869]: I1001 16:27:07.913713 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/40f107b2-b2e7-4ef0-8566-1c3c1b7122f7-catalog-content\") pod \"40f107b2-b2e7-4ef0-8566-1c3c1b7122f7\" (UID: \"40f107b2-b2e7-4ef0-8566-1c3c1b7122f7\") " Oct 01 16:27:07 crc kubenswrapper[4869]: I1001 16:27:07.914328 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/40f107b2-b2e7-4ef0-8566-1c3c1b7122f7-utilities" (OuterVolumeSpecName: "utilities") pod "40f107b2-b2e7-4ef0-8566-1c3c1b7122f7" (UID: "40f107b2-b2e7-4ef0-8566-1c3c1b7122f7"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 16:27:07 crc kubenswrapper[4869]: I1001 16:27:07.921629 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/40f107b2-b2e7-4ef0-8566-1c3c1b7122f7-kube-api-access-zw68s" (OuterVolumeSpecName: "kube-api-access-zw68s") pod "40f107b2-b2e7-4ef0-8566-1c3c1b7122f7" (UID: "40f107b2-b2e7-4ef0-8566-1c3c1b7122f7"). InnerVolumeSpecName "kube-api-access-zw68s". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 16:27:07 crc kubenswrapper[4869]: I1001 16:27:07.927103 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/40f107b2-b2e7-4ef0-8566-1c3c1b7122f7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "40f107b2-b2e7-4ef0-8566-1c3c1b7122f7" (UID: "40f107b2-b2e7-4ef0-8566-1c3c1b7122f7"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 16:27:08 crc kubenswrapper[4869]: I1001 16:27:08.015569 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zw68s\" (UniqueName: \"kubernetes.io/projected/40f107b2-b2e7-4ef0-8566-1c3c1b7122f7-kube-api-access-zw68s\") on node \"crc\" DevicePath \"\"" Oct 01 16:27:08 crc kubenswrapper[4869]: I1001 16:27:08.015839 4869 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/40f107b2-b2e7-4ef0-8566-1c3c1b7122f7-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 16:27:08 crc kubenswrapper[4869]: I1001 16:27:08.015901 4869 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/40f107b2-b2e7-4ef0-8566-1c3c1b7122f7-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 16:27:08 crc kubenswrapper[4869]: I1001 16:27:08.162047 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-xssmj" Oct 01 16:27:08 crc kubenswrapper[4869]: I1001 16:27:08.162567 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-xssmj" Oct 01 16:27:08 crc kubenswrapper[4869]: I1001 16:27:08.224697 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-xssmj" Oct 01 16:27:08 crc kubenswrapper[4869]: I1001 16:27:08.383380 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mf2dz" Oct 01 16:27:08 crc kubenswrapper[4869]: I1001 16:27:08.383547 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mf2dz" event={"ID":"40f107b2-b2e7-4ef0-8566-1c3c1b7122f7","Type":"ContainerDied","Data":"36bd1ad83cdfdec86e413e5477e35b53248a9c6d2f3ba33b59ebaa5dfa645aae"} Oct 01 16:27:08 crc kubenswrapper[4869]: I1001 16:27:08.384847 4869 scope.go:117] "RemoveContainer" containerID="c312189b7f88861db70bb4ff72f21a37840626104cf414596eb65c64222dc8ef" Oct 01 16:27:08 crc kubenswrapper[4869]: I1001 16:27:08.422501 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-mf2dz"] Oct 01 16:27:08 crc kubenswrapper[4869]: I1001 16:27:08.423765 4869 scope.go:117] "RemoveContainer" containerID="62c1f0344bf15a375e99af93bd96c39778e11d91b8ea95cb1e67ed9c6bf2ec89" Oct 01 16:27:08 crc kubenswrapper[4869]: I1001 16:27:08.431085 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-mf2dz"] Oct 01 16:27:08 crc kubenswrapper[4869]: I1001 16:27:08.451562 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-xssmj" Oct 01 16:27:08 crc kubenswrapper[4869]: I1001 16:27:08.454994 4869 scope.go:117] "RemoveContainer" containerID="6ab6019578963a7d7fd4280076a31cc989e13d26d895e283393bdced06f5e368" Oct 01 16:27:09 crc kubenswrapper[4869]: I1001 16:27:09.592671 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="40f107b2-b2e7-4ef0-8566-1c3c1b7122f7" path="/var/lib/kubelet/pods/40f107b2-b2e7-4ef0-8566-1c3c1b7122f7/volumes" Oct 01 16:27:10 crc kubenswrapper[4869]: I1001 16:27:10.627129 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-xssmj"] Oct 01 16:27:11 crc kubenswrapper[4869]: I1001 16:27:11.429230 4869 kuberuntime_container.go:808] 
"Killing container with a grace period" pod="openshift-marketplace/certified-operators-xssmj" podUID="4fb87c85-fa63-4813-a35c-4e9342046f8c" containerName="registry-server" containerID="cri-o://7f025e93ec3a102d6cbbb29a66742a7285cd215e4f2579d4295238c51b1d0e96" gracePeriod=2 Oct 01 16:27:11 crc kubenswrapper[4869]: I1001 16:27:11.979200 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-xssmj" Oct 01 16:27:12 crc kubenswrapper[4869]: I1001 16:27:12.109232 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jwrlc\" (UniqueName: \"kubernetes.io/projected/4fb87c85-fa63-4813-a35c-4e9342046f8c-kube-api-access-jwrlc\") pod \"4fb87c85-fa63-4813-a35c-4e9342046f8c\" (UID: \"4fb87c85-fa63-4813-a35c-4e9342046f8c\") " Oct 01 16:27:12 crc kubenswrapper[4869]: I1001 16:27:12.109621 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4fb87c85-fa63-4813-a35c-4e9342046f8c-catalog-content\") pod \"4fb87c85-fa63-4813-a35c-4e9342046f8c\" (UID: \"4fb87c85-fa63-4813-a35c-4e9342046f8c\") " Oct 01 16:27:12 crc kubenswrapper[4869]: I1001 16:27:12.109759 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4fb87c85-fa63-4813-a35c-4e9342046f8c-utilities\") pod \"4fb87c85-fa63-4813-a35c-4e9342046f8c\" (UID: \"4fb87c85-fa63-4813-a35c-4e9342046f8c\") " Oct 01 16:27:12 crc kubenswrapper[4869]: I1001 16:27:12.112988 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4fb87c85-fa63-4813-a35c-4e9342046f8c-utilities" (OuterVolumeSpecName: "utilities") pod "4fb87c85-fa63-4813-a35c-4e9342046f8c" (UID: "4fb87c85-fa63-4813-a35c-4e9342046f8c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 16:27:12 crc kubenswrapper[4869]: I1001 16:27:12.116029 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4fb87c85-fa63-4813-a35c-4e9342046f8c-kube-api-access-jwrlc" (OuterVolumeSpecName: "kube-api-access-jwrlc") pod "4fb87c85-fa63-4813-a35c-4e9342046f8c" (UID: "4fb87c85-fa63-4813-a35c-4e9342046f8c"). InnerVolumeSpecName "kube-api-access-jwrlc". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 16:27:12 crc kubenswrapper[4869]: I1001 16:27:12.152495 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4fb87c85-fa63-4813-a35c-4e9342046f8c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4fb87c85-fa63-4813-a35c-4e9342046f8c" (UID: "4fb87c85-fa63-4813-a35c-4e9342046f8c"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 16:27:12 crc kubenswrapper[4869]: I1001 16:27:12.211514 4869 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4fb87c85-fa63-4813-a35c-4e9342046f8c-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 16:27:12 crc kubenswrapper[4869]: I1001 16:27:12.211546 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jwrlc\" (UniqueName: \"kubernetes.io/projected/4fb87c85-fa63-4813-a35c-4e9342046f8c-kube-api-access-jwrlc\") on node \"crc\" DevicePath \"\"" Oct 01 16:27:12 crc kubenswrapper[4869]: I1001 16:27:12.211555 4869 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4fb87c85-fa63-4813-a35c-4e9342046f8c-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 16:27:12 crc kubenswrapper[4869]: I1001 16:27:12.439181 4869 generic.go:334] "Generic (PLEG): container finished" podID="4fb87c85-fa63-4813-a35c-4e9342046f8c" containerID="7f025e93ec3a102d6cbbb29a66742a7285cd215e4f2579d4295238c51b1d0e96" exitCode=0 Oct 01 16:27:12 crc kubenswrapper[4869]: I1001 16:27:12.439228 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xssmj" event={"ID":"4fb87c85-fa63-4813-a35c-4e9342046f8c","Type":"ContainerDied","Data":"7f025e93ec3a102d6cbbb29a66742a7285cd215e4f2579d4295238c51b1d0e96"} Oct 01 16:27:12 crc kubenswrapper[4869]: I1001 16:27:12.439269 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xssmj" event={"ID":"4fb87c85-fa63-4813-a35c-4e9342046f8c","Type":"ContainerDied","Data":"f3706d11c5bca8d4bc8d01220186ad4a00458b9bfdcc33a2dfd2f139defd4b0d"} Oct 01 16:27:12 crc kubenswrapper[4869]: I1001 16:27:12.439289 4869 scope.go:117] "RemoveContainer" containerID="7f025e93ec3a102d6cbbb29a66742a7285cd215e4f2579d4295238c51b1d0e96" Oct 01 16:27:12 crc kubenswrapper[4869]: I1001 16:27:12.439419 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-xssmj" Oct 01 16:27:12 crc kubenswrapper[4869]: I1001 16:27:12.471900 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-xssmj"] Oct 01 16:27:12 crc kubenswrapper[4869]: I1001 16:27:12.478660 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-xssmj"] Oct 01 16:27:12 crc kubenswrapper[4869]: I1001 16:27:12.479359 4869 scope.go:117] "RemoveContainer" containerID="781746f3e65c8ba428d64dc976d959a0d075f07faeb9093b2e24c0979ef89b18" Oct 01 16:27:12 crc kubenswrapper[4869]: I1001 16:27:12.519818 4869 scope.go:117] "RemoveContainer" containerID="a92df9940d25a7ffb845706489896c4b78cfe497fc59fd385769f303ce39961d" Oct 01 16:27:12 crc kubenswrapper[4869]: I1001 16:27:12.547223 4869 scope.go:117] "RemoveContainer" containerID="7f025e93ec3a102d6cbbb29a66742a7285cd215e4f2579d4295238c51b1d0e96" Oct 01 16:27:12 crc kubenswrapper[4869]: E1001 16:27:12.548201 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7f025e93ec3a102d6cbbb29a66742a7285cd215e4f2579d4295238c51b1d0e96\": container with ID starting with 7f025e93ec3a102d6cbbb29a66742a7285cd215e4f2579d4295238c51b1d0e96 not found: ID does not exist" containerID="7f025e93ec3a102d6cbbb29a66742a7285cd215e4f2579d4295238c51b1d0e96" Oct 01 16:27:12 crc kubenswrapper[4869]: I1001 16:27:12.548236 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7f025e93ec3a102d6cbbb29a66742a7285cd215e4f2579d4295238c51b1d0e96"} err="failed to get container status \"7f025e93ec3a102d6cbbb29a66742a7285cd215e4f2579d4295238c51b1d0e96\": rpc error: code = NotFound desc = could not find container \"7f025e93ec3a102d6cbbb29a66742a7285cd215e4f2579d4295238c51b1d0e96\": container with ID starting with 7f025e93ec3a102d6cbbb29a66742a7285cd215e4f2579d4295238c51b1d0e96 not found: ID does not exist" Oct 01 16:27:12 crc kubenswrapper[4869]: I1001 16:27:12.548277 4869 scope.go:117] "RemoveContainer" containerID="781746f3e65c8ba428d64dc976d959a0d075f07faeb9093b2e24c0979ef89b18" Oct 01 16:27:12 crc kubenswrapper[4869]: E1001 16:27:12.548909 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"781746f3e65c8ba428d64dc976d959a0d075f07faeb9093b2e24c0979ef89b18\": container with ID starting with 781746f3e65c8ba428d64dc976d959a0d075f07faeb9093b2e24c0979ef89b18 not found: ID does not exist" containerID="781746f3e65c8ba428d64dc976d959a0d075f07faeb9093b2e24c0979ef89b18" Oct 01 16:27:12 crc kubenswrapper[4869]: I1001 16:27:12.548938 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"781746f3e65c8ba428d64dc976d959a0d075f07faeb9093b2e24c0979ef89b18"} err="failed to get container status \"781746f3e65c8ba428d64dc976d959a0d075f07faeb9093b2e24c0979ef89b18\": rpc error: code = NotFound desc = could not find container \"781746f3e65c8ba428d64dc976d959a0d075f07faeb9093b2e24c0979ef89b18\": container with ID starting with 781746f3e65c8ba428d64dc976d959a0d075f07faeb9093b2e24c0979ef89b18 not found: ID does not exist" Oct 01 16:27:12 crc kubenswrapper[4869]: I1001 16:27:12.548955 4869 scope.go:117] "RemoveContainer" containerID="a92df9940d25a7ffb845706489896c4b78cfe497fc59fd385769f303ce39961d" Oct 01 16:27:12 crc kubenswrapper[4869]: E1001 16:27:12.549348 4869 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"a92df9940d25a7ffb845706489896c4b78cfe497fc59fd385769f303ce39961d\": container with ID starting with a92df9940d25a7ffb845706489896c4b78cfe497fc59fd385769f303ce39961d not found: ID does not exist" containerID="a92df9940d25a7ffb845706489896c4b78cfe497fc59fd385769f303ce39961d" Oct 01 16:27:12 crc kubenswrapper[4869]: I1001 16:27:12.549376 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a92df9940d25a7ffb845706489896c4b78cfe497fc59fd385769f303ce39961d"} err="failed to get container status \"a92df9940d25a7ffb845706489896c4b78cfe497fc59fd385769f303ce39961d\": rpc error: code = NotFound desc = could not find container \"a92df9940d25a7ffb845706489896c4b78cfe497fc59fd385769f303ce39961d\": container with ID starting with a92df9940d25a7ffb845706489896c4b78cfe497fc59fd385769f303ce39961d not found: ID does not exist" Oct 01 16:27:13 crc kubenswrapper[4869]: I1001 16:27:13.353889 4869 patch_prober.go:28] interesting pod/machine-config-daemon-c86m8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 16:27:13 crc kubenswrapper[4869]: I1001 16:27:13.354239 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 16:27:13 crc kubenswrapper[4869]: I1001 16:27:13.354305 4869 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" Oct 01 16:27:13 crc kubenswrapper[4869]: I1001 16:27:13.355095 4869 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"0eaa53cd5d569b0e6bef9edefc7d1fa07489655924f7f785aa79773af28d727f"} pod="openshift-machine-config-operator/machine-config-daemon-c86m8" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 01 16:27:13 crc kubenswrapper[4869]: I1001 16:27:13.355166 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" containerID="cri-o://0eaa53cd5d569b0e6bef9edefc7d1fa07489655924f7f785aa79773af28d727f" gracePeriod=600 Oct 01 16:27:13 crc kubenswrapper[4869]: E1001 16:27:13.476236 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:27:13 crc kubenswrapper[4869]: I1001 16:27:13.591467 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4fb87c85-fa63-4813-a35c-4e9342046f8c" path="/var/lib/kubelet/pods/4fb87c85-fa63-4813-a35c-4e9342046f8c/volumes" Oct 01 16:27:14 crc kubenswrapper[4869]: I1001 16:27:14.461421 4869 generic.go:334] 
"Generic (PLEG): container finished" podID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerID="0eaa53cd5d569b0e6bef9edefc7d1fa07489655924f7f785aa79773af28d727f" exitCode=0 Oct 01 16:27:14 crc kubenswrapper[4869]: I1001 16:27:14.461495 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" event={"ID":"a4b64f7f-0b03-4f47-965b-9fde048b735c","Type":"ContainerDied","Data":"0eaa53cd5d569b0e6bef9edefc7d1fa07489655924f7f785aa79773af28d727f"} Oct 01 16:27:14 crc kubenswrapper[4869]: I1001 16:27:14.461769 4869 scope.go:117] "RemoveContainer" containerID="5e2d8e6198d2f74bc8fa7e68cea96d1e2800cee50b353247648e0d66925b176c" Oct 01 16:27:14 crc kubenswrapper[4869]: I1001 16:27:14.462504 4869 scope.go:117] "RemoveContainer" containerID="0eaa53cd5d569b0e6bef9edefc7d1fa07489655924f7f785aa79773af28d727f" Oct 01 16:27:14 crc kubenswrapper[4869]: E1001 16:27:14.462893 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:27:25 crc kubenswrapper[4869]: I1001 16:27:25.581741 4869 scope.go:117] "RemoveContainer" containerID="0eaa53cd5d569b0e6bef9edefc7d1fa07489655924f7f785aa79773af28d727f" Oct 01 16:27:25 crc kubenswrapper[4869]: E1001 16:27:25.582447 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:27:38 crc kubenswrapper[4869]: I1001 16:27:38.581386 4869 scope.go:117] "RemoveContainer" containerID="0eaa53cd5d569b0e6bef9edefc7d1fa07489655924f7f785aa79773af28d727f" Oct 01 16:27:38 crc kubenswrapper[4869]: E1001 16:27:38.582189 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:27:51 crc kubenswrapper[4869]: I1001 16:27:51.587835 4869 scope.go:117] "RemoveContainer" containerID="0eaa53cd5d569b0e6bef9edefc7d1fa07489655924f7f785aa79773af28d727f" Oct 01 16:27:51 crc kubenswrapper[4869]: E1001 16:27:51.588682 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:28:05 crc kubenswrapper[4869]: I1001 16:28:05.580804 4869 scope.go:117] "RemoveContainer" containerID="0eaa53cd5d569b0e6bef9edefc7d1fa07489655924f7f785aa79773af28d727f" 
Oct 01 16:28:05 crc kubenswrapper[4869]: E1001 16:28:05.581611 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:28:18 crc kubenswrapper[4869]: I1001 16:28:18.581459 4869 scope.go:117] "RemoveContainer" containerID="0eaa53cd5d569b0e6bef9edefc7d1fa07489655924f7f785aa79773af28d727f" Oct 01 16:28:18 crc kubenswrapper[4869]: E1001 16:28:18.582096 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:28:29 crc kubenswrapper[4869]: I1001 16:28:29.581838 4869 scope.go:117] "RemoveContainer" containerID="0eaa53cd5d569b0e6bef9edefc7d1fa07489655924f7f785aa79773af28d727f" Oct 01 16:28:29 crc kubenswrapper[4869]: E1001 16:28:29.583223 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:28:42 crc kubenswrapper[4869]: I1001 16:28:42.580648 4869 scope.go:117] "RemoveContainer" containerID="0eaa53cd5d569b0e6bef9edefc7d1fa07489655924f7f785aa79773af28d727f" Oct 01 16:28:42 crc kubenswrapper[4869]: E1001 16:28:42.581398 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:28:56 crc kubenswrapper[4869]: I1001 16:28:56.581081 4869 scope.go:117] "RemoveContainer" containerID="0eaa53cd5d569b0e6bef9edefc7d1fa07489655924f7f785aa79773af28d727f" Oct 01 16:28:56 crc kubenswrapper[4869]: E1001 16:28:56.581896 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:29:11 crc kubenswrapper[4869]: I1001 16:29:11.587417 4869 scope.go:117] "RemoveContainer" containerID="0eaa53cd5d569b0e6bef9edefc7d1fa07489655924f7f785aa79773af28d727f" Oct 01 16:29:11 crc kubenswrapper[4869]: E1001 16:29:11.588238 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with 
CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:29:25 crc kubenswrapper[4869]: I1001 16:29:25.581819 4869 scope.go:117] "RemoveContainer" containerID="0eaa53cd5d569b0e6bef9edefc7d1fa07489655924f7f785aa79773af28d727f" Oct 01 16:29:25 crc kubenswrapper[4869]: E1001 16:29:25.582623 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:29:39 crc kubenswrapper[4869]: I1001 16:29:39.581181 4869 scope.go:117] "RemoveContainer" containerID="0eaa53cd5d569b0e6bef9edefc7d1fa07489655924f7f785aa79773af28d727f" Oct 01 16:29:39 crc kubenswrapper[4869]: E1001 16:29:39.581996 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:29:54 crc kubenswrapper[4869]: I1001 16:29:54.581872 4869 scope.go:117] "RemoveContainer" containerID="0eaa53cd5d569b0e6bef9edefc7d1fa07489655924f7f785aa79773af28d727f" Oct 01 16:29:54 crc kubenswrapper[4869]: E1001 16:29:54.582888 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:29:55 crc kubenswrapper[4869]: I1001 16:29:55.443907 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-ls522"] Oct 01 16:29:55 crc kubenswrapper[4869]: E1001 16:29:55.444742 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="40f107b2-b2e7-4ef0-8566-1c3c1b7122f7" containerName="extract-utilities" Oct 01 16:29:55 crc kubenswrapper[4869]: I1001 16:29:55.444766 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="40f107b2-b2e7-4ef0-8566-1c3c1b7122f7" containerName="extract-utilities" Oct 01 16:29:55 crc kubenswrapper[4869]: E1001 16:29:55.444789 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="40f107b2-b2e7-4ef0-8566-1c3c1b7122f7" containerName="registry-server" Oct 01 16:29:55 crc kubenswrapper[4869]: I1001 16:29:55.444797 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="40f107b2-b2e7-4ef0-8566-1c3c1b7122f7" containerName="registry-server" Oct 01 16:29:55 crc kubenswrapper[4869]: E1001 16:29:55.444816 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4fb87c85-fa63-4813-a35c-4e9342046f8c" containerName="extract-utilities" Oct 01 16:29:55 crc kubenswrapper[4869]: I1001 
16:29:55.444844 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="4fb87c85-fa63-4813-a35c-4e9342046f8c" containerName="extract-utilities" Oct 01 16:29:55 crc kubenswrapper[4869]: E1001 16:29:55.444857 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4fb87c85-fa63-4813-a35c-4e9342046f8c" containerName="extract-content" Oct 01 16:29:55 crc kubenswrapper[4869]: I1001 16:29:55.444865 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="4fb87c85-fa63-4813-a35c-4e9342046f8c" containerName="extract-content" Oct 01 16:29:55 crc kubenswrapper[4869]: E1001 16:29:55.444889 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="40f107b2-b2e7-4ef0-8566-1c3c1b7122f7" containerName="extract-content" Oct 01 16:29:55 crc kubenswrapper[4869]: I1001 16:29:55.444896 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="40f107b2-b2e7-4ef0-8566-1c3c1b7122f7" containerName="extract-content" Oct 01 16:29:55 crc kubenswrapper[4869]: E1001 16:29:55.444923 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4fb87c85-fa63-4813-a35c-4e9342046f8c" containerName="registry-server" Oct 01 16:29:55 crc kubenswrapper[4869]: I1001 16:29:55.444932 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="4fb87c85-fa63-4813-a35c-4e9342046f8c" containerName="registry-server" Oct 01 16:29:55 crc kubenswrapper[4869]: I1001 16:29:55.445152 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="40f107b2-b2e7-4ef0-8566-1c3c1b7122f7" containerName="registry-server" Oct 01 16:29:55 crc kubenswrapper[4869]: I1001 16:29:55.445196 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="4fb87c85-fa63-4813-a35c-4e9342046f8c" containerName="registry-server" Oct 01 16:29:55 crc kubenswrapper[4869]: I1001 16:29:55.448561 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-ls522" Oct 01 16:29:55 crc kubenswrapper[4869]: I1001 16:29:55.456503 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-ls522"] Oct 01 16:29:55 crc kubenswrapper[4869]: I1001 16:29:55.543639 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5b54q\" (UniqueName: \"kubernetes.io/projected/f297cbfe-1e49-4afc-8fe5-9a951f59163c-kube-api-access-5b54q\") pod \"community-operators-ls522\" (UID: \"f297cbfe-1e49-4afc-8fe5-9a951f59163c\") " pod="openshift-marketplace/community-operators-ls522" Oct 01 16:29:55 crc kubenswrapper[4869]: I1001 16:29:55.543691 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f297cbfe-1e49-4afc-8fe5-9a951f59163c-utilities\") pod \"community-operators-ls522\" (UID: \"f297cbfe-1e49-4afc-8fe5-9a951f59163c\") " pod="openshift-marketplace/community-operators-ls522" Oct 01 16:29:55 crc kubenswrapper[4869]: I1001 16:29:55.543718 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f297cbfe-1e49-4afc-8fe5-9a951f59163c-catalog-content\") pod \"community-operators-ls522\" (UID: \"f297cbfe-1e49-4afc-8fe5-9a951f59163c\") " pod="openshift-marketplace/community-operators-ls522" Oct 01 16:29:55 crc kubenswrapper[4869]: I1001 16:29:55.645689 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f297cbfe-1e49-4afc-8fe5-9a951f59163c-catalog-content\") pod \"community-operators-ls522\" (UID: \"f297cbfe-1e49-4afc-8fe5-9a951f59163c\") " pod="openshift-marketplace/community-operators-ls522" Oct 01 16:29:55 crc kubenswrapper[4869]: I1001 16:29:55.645942 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5b54q\" (UniqueName: \"kubernetes.io/projected/f297cbfe-1e49-4afc-8fe5-9a951f59163c-kube-api-access-5b54q\") pod \"community-operators-ls522\" (UID: \"f297cbfe-1e49-4afc-8fe5-9a951f59163c\") " pod="openshift-marketplace/community-operators-ls522" Oct 01 16:29:55 crc kubenswrapper[4869]: I1001 16:29:55.645974 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f297cbfe-1e49-4afc-8fe5-9a951f59163c-utilities\") pod \"community-operators-ls522\" (UID: \"f297cbfe-1e49-4afc-8fe5-9a951f59163c\") " pod="openshift-marketplace/community-operators-ls522" Oct 01 16:29:55 crc kubenswrapper[4869]: I1001 16:29:55.646133 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f297cbfe-1e49-4afc-8fe5-9a951f59163c-catalog-content\") pod \"community-operators-ls522\" (UID: \"f297cbfe-1e49-4afc-8fe5-9a951f59163c\") " pod="openshift-marketplace/community-operators-ls522" Oct 01 16:29:55 crc kubenswrapper[4869]: I1001 16:29:55.646354 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f297cbfe-1e49-4afc-8fe5-9a951f59163c-utilities\") pod \"community-operators-ls522\" (UID: \"f297cbfe-1e49-4afc-8fe5-9a951f59163c\") " pod="openshift-marketplace/community-operators-ls522" Oct 01 16:29:55 crc kubenswrapper[4869]: I1001 16:29:55.669602 4869 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-5b54q\" (UniqueName: \"kubernetes.io/projected/f297cbfe-1e49-4afc-8fe5-9a951f59163c-kube-api-access-5b54q\") pod \"community-operators-ls522\" (UID: \"f297cbfe-1e49-4afc-8fe5-9a951f59163c\") " pod="openshift-marketplace/community-operators-ls522" Oct 01 16:29:55 crc kubenswrapper[4869]: I1001 16:29:55.773115 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-ls522" Oct 01 16:29:56 crc kubenswrapper[4869]: I1001 16:29:56.304441 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-ls522"] Oct 01 16:29:57 crc kubenswrapper[4869]: I1001 16:29:57.025058 4869 generic.go:334] "Generic (PLEG): container finished" podID="f297cbfe-1e49-4afc-8fe5-9a951f59163c" containerID="9836c11a8e6f7b2fe089cbb6d0de1f2ccac08df3ae2ee3c7c6a306610ca8611e" exitCode=0 Oct 01 16:29:57 crc kubenswrapper[4869]: I1001 16:29:57.025106 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ls522" event={"ID":"f297cbfe-1e49-4afc-8fe5-9a951f59163c","Type":"ContainerDied","Data":"9836c11a8e6f7b2fe089cbb6d0de1f2ccac08df3ae2ee3c7c6a306610ca8611e"} Oct 01 16:29:57 crc kubenswrapper[4869]: I1001 16:29:57.025464 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ls522" event={"ID":"f297cbfe-1e49-4afc-8fe5-9a951f59163c","Type":"ContainerStarted","Data":"57a0e8133990f8ceef6eb8de0e4960660bd79fa980db5a0b23a50a94232bf809"} Oct 01 16:29:58 crc kubenswrapper[4869]: I1001 16:29:58.062048 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ls522" event={"ID":"f297cbfe-1e49-4afc-8fe5-9a951f59163c","Type":"ContainerStarted","Data":"3047dd67ec98b4c74db5d62393f6062431b482a65c6ec358a88c0a0b8049f08f"} Oct 01 16:30:00 crc kubenswrapper[4869]: I1001 16:30:00.082426 4869 generic.go:334] "Generic (PLEG): container finished" podID="f297cbfe-1e49-4afc-8fe5-9a951f59163c" containerID="3047dd67ec98b4c74db5d62393f6062431b482a65c6ec358a88c0a0b8049f08f" exitCode=0 Oct 01 16:30:00 crc kubenswrapper[4869]: I1001 16:30:00.082495 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ls522" event={"ID":"f297cbfe-1e49-4afc-8fe5-9a951f59163c","Type":"ContainerDied","Data":"3047dd67ec98b4c74db5d62393f6062431b482a65c6ec358a88c0a0b8049f08f"} Oct 01 16:30:00 crc kubenswrapper[4869]: I1001 16:30:00.162971 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29322270-pjv6n"] Oct 01 16:30:00 crc kubenswrapper[4869]: I1001 16:30:00.164275 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29322270-pjv6n" Oct 01 16:30:00 crc kubenswrapper[4869]: I1001 16:30:00.166205 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 01 16:30:00 crc kubenswrapper[4869]: I1001 16:30:00.166887 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 01 16:30:00 crc kubenswrapper[4869]: I1001 16:30:00.175668 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29322270-pjv6n"] Oct 01 16:30:00 crc kubenswrapper[4869]: I1001 16:30:00.246734 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/495ca456-ea2e-47b7-9b98-307bc08df870-secret-volume\") pod \"collect-profiles-29322270-pjv6n\" (UID: \"495ca456-ea2e-47b7-9b98-307bc08df870\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322270-pjv6n" Oct 01 16:30:00 crc kubenswrapper[4869]: I1001 16:30:00.246799 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/495ca456-ea2e-47b7-9b98-307bc08df870-config-volume\") pod \"collect-profiles-29322270-pjv6n\" (UID: \"495ca456-ea2e-47b7-9b98-307bc08df870\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322270-pjv6n" Oct 01 16:30:00 crc kubenswrapper[4869]: I1001 16:30:00.247194 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jkknk\" (UniqueName: \"kubernetes.io/projected/495ca456-ea2e-47b7-9b98-307bc08df870-kube-api-access-jkknk\") pod \"collect-profiles-29322270-pjv6n\" (UID: \"495ca456-ea2e-47b7-9b98-307bc08df870\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322270-pjv6n" Oct 01 16:30:00 crc kubenswrapper[4869]: I1001 16:30:00.354130 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jkknk\" (UniqueName: \"kubernetes.io/projected/495ca456-ea2e-47b7-9b98-307bc08df870-kube-api-access-jkknk\") pod \"collect-profiles-29322270-pjv6n\" (UID: \"495ca456-ea2e-47b7-9b98-307bc08df870\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322270-pjv6n" Oct 01 16:30:00 crc kubenswrapper[4869]: I1001 16:30:00.354306 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/495ca456-ea2e-47b7-9b98-307bc08df870-secret-volume\") pod \"collect-profiles-29322270-pjv6n\" (UID: \"495ca456-ea2e-47b7-9b98-307bc08df870\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322270-pjv6n" Oct 01 16:30:00 crc kubenswrapper[4869]: I1001 16:30:00.354392 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/495ca456-ea2e-47b7-9b98-307bc08df870-config-volume\") pod \"collect-profiles-29322270-pjv6n\" (UID: \"495ca456-ea2e-47b7-9b98-307bc08df870\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322270-pjv6n" Oct 01 16:30:00 crc kubenswrapper[4869]: I1001 16:30:00.355456 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/495ca456-ea2e-47b7-9b98-307bc08df870-config-volume\") pod 
\"collect-profiles-29322270-pjv6n\" (UID: \"495ca456-ea2e-47b7-9b98-307bc08df870\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322270-pjv6n" Oct 01 16:30:00 crc kubenswrapper[4869]: I1001 16:30:00.552793 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/495ca456-ea2e-47b7-9b98-307bc08df870-secret-volume\") pod \"collect-profiles-29322270-pjv6n\" (UID: \"495ca456-ea2e-47b7-9b98-307bc08df870\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322270-pjv6n" Oct 01 16:30:00 crc kubenswrapper[4869]: I1001 16:30:00.553346 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jkknk\" (UniqueName: \"kubernetes.io/projected/495ca456-ea2e-47b7-9b98-307bc08df870-kube-api-access-jkknk\") pod \"collect-profiles-29322270-pjv6n\" (UID: \"495ca456-ea2e-47b7-9b98-307bc08df870\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322270-pjv6n" Oct 01 16:30:00 crc kubenswrapper[4869]: I1001 16:30:00.802861 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29322270-pjv6n" Oct 01 16:30:01 crc kubenswrapper[4869]: I1001 16:30:01.102628 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ls522" event={"ID":"f297cbfe-1e49-4afc-8fe5-9a951f59163c","Type":"ContainerStarted","Data":"75de49fb06d78ea85f12de12249bb045225737870644567a5bbd70e434ce797d"} Oct 01 16:30:01 crc kubenswrapper[4869]: I1001 16:30:01.126959 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-ls522" podStartSLOduration=2.431173442 podStartE2EDuration="6.126931517s" podCreationTimestamp="2025-10-01 16:29:55 +0000 UTC" firstStartedPulling="2025-10-01 16:29:57.028578359 +0000 UTC m=+5106.175421515" lastFinishedPulling="2025-10-01 16:30:00.724336474 +0000 UTC m=+5109.871179590" observedRunningTime="2025-10-01 16:30:01.121791347 +0000 UTC m=+5110.268634483" watchObservedRunningTime="2025-10-01 16:30:01.126931517 +0000 UTC m=+5110.273774633" Oct 01 16:30:01 crc kubenswrapper[4869]: I1001 16:30:01.300030 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29322270-pjv6n"] Oct 01 16:30:01 crc kubenswrapper[4869]: W1001 16:30:01.304244 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod495ca456_ea2e_47b7_9b98_307bc08df870.slice/crio-a1722c4f4199f5a23a1c3f3744e940cd3b0118b5578c8cd6fe9319711a0da4c1 WatchSource:0}: Error finding container a1722c4f4199f5a23a1c3f3744e940cd3b0118b5578c8cd6fe9319711a0da4c1: Status 404 returned error can't find the container with id a1722c4f4199f5a23a1c3f3744e940cd3b0118b5578c8cd6fe9319711a0da4c1 Oct 01 16:30:02 crc kubenswrapper[4869]: I1001 16:30:02.111908 4869 generic.go:334] "Generic (PLEG): container finished" podID="495ca456-ea2e-47b7-9b98-307bc08df870" containerID="bab5db0bd2f9e90691aebb0e21f37c930915052309ee2b4016701f0b6ac55e2c" exitCode=0 Oct 01 16:30:02 crc kubenswrapper[4869]: I1001 16:30:02.111983 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29322270-pjv6n" event={"ID":"495ca456-ea2e-47b7-9b98-307bc08df870","Type":"ContainerDied","Data":"bab5db0bd2f9e90691aebb0e21f37c930915052309ee2b4016701f0b6ac55e2c"} Oct 01 16:30:02 crc kubenswrapper[4869]: I1001 
16:30:02.112231 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29322270-pjv6n" event={"ID":"495ca456-ea2e-47b7-9b98-307bc08df870","Type":"ContainerStarted","Data":"a1722c4f4199f5a23a1c3f3744e940cd3b0118b5578c8cd6fe9319711a0da4c1"} Oct 01 16:30:03 crc kubenswrapper[4869]: I1001 16:30:03.610230 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29322270-pjv6n" Oct 01 16:30:03 crc kubenswrapper[4869]: I1001 16:30:03.738965 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/495ca456-ea2e-47b7-9b98-307bc08df870-secret-volume\") pod \"495ca456-ea2e-47b7-9b98-307bc08df870\" (UID: \"495ca456-ea2e-47b7-9b98-307bc08df870\") " Oct 01 16:30:03 crc kubenswrapper[4869]: I1001 16:30:03.739228 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkknk\" (UniqueName: \"kubernetes.io/projected/495ca456-ea2e-47b7-9b98-307bc08df870-kube-api-access-jkknk\") pod \"495ca456-ea2e-47b7-9b98-307bc08df870\" (UID: \"495ca456-ea2e-47b7-9b98-307bc08df870\") " Oct 01 16:30:03 crc kubenswrapper[4869]: I1001 16:30:03.739310 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/495ca456-ea2e-47b7-9b98-307bc08df870-config-volume\") pod \"495ca456-ea2e-47b7-9b98-307bc08df870\" (UID: \"495ca456-ea2e-47b7-9b98-307bc08df870\") " Oct 01 16:30:03 crc kubenswrapper[4869]: I1001 16:30:03.740381 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/495ca456-ea2e-47b7-9b98-307bc08df870-config-volume" (OuterVolumeSpecName: "config-volume") pod "495ca456-ea2e-47b7-9b98-307bc08df870" (UID: "495ca456-ea2e-47b7-9b98-307bc08df870"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 16:30:03 crc kubenswrapper[4869]: I1001 16:30:03.744747 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/495ca456-ea2e-47b7-9b98-307bc08df870-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "495ca456-ea2e-47b7-9b98-307bc08df870" (UID: "495ca456-ea2e-47b7-9b98-307bc08df870"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 16:30:03 crc kubenswrapper[4869]: I1001 16:30:03.745831 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/495ca456-ea2e-47b7-9b98-307bc08df870-kube-api-access-jkknk" (OuterVolumeSpecName: "kube-api-access-jkknk") pod "495ca456-ea2e-47b7-9b98-307bc08df870" (UID: "495ca456-ea2e-47b7-9b98-307bc08df870"). InnerVolumeSpecName "kube-api-access-jkknk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 16:30:03 crc kubenswrapper[4869]: I1001 16:30:03.841906 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkknk\" (UniqueName: \"kubernetes.io/projected/495ca456-ea2e-47b7-9b98-307bc08df870-kube-api-access-jkknk\") on node \"crc\" DevicePath \"\"" Oct 01 16:30:03 crc kubenswrapper[4869]: I1001 16:30:03.841947 4869 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/495ca456-ea2e-47b7-9b98-307bc08df870-config-volume\") on node \"crc\" DevicePath \"\"" Oct 01 16:30:03 crc kubenswrapper[4869]: I1001 16:30:03.841957 4869 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/495ca456-ea2e-47b7-9b98-307bc08df870-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 01 16:30:04 crc kubenswrapper[4869]: I1001 16:30:04.130169 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29322270-pjv6n" event={"ID":"495ca456-ea2e-47b7-9b98-307bc08df870","Type":"ContainerDied","Data":"a1722c4f4199f5a23a1c3f3744e940cd3b0118b5578c8cd6fe9319711a0da4c1"} Oct 01 16:30:04 crc kubenswrapper[4869]: I1001 16:30:04.130217 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a1722c4f4199f5a23a1c3f3744e940cd3b0118b5578c8cd6fe9319711a0da4c1" Oct 01 16:30:04 crc kubenswrapper[4869]: I1001 16:30:04.130238 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29322270-pjv6n" Oct 01 16:30:04 crc kubenswrapper[4869]: I1001 16:30:04.690945 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29322225-2hwr8"] Oct 01 16:30:04 crc kubenswrapper[4869]: I1001 16:30:04.699329 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29322225-2hwr8"] Oct 01 16:30:05 crc kubenswrapper[4869]: I1001 16:30:05.593953 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3d379718-8ee6-4e42-8d61-01f74beb9e0a" path="/var/lib/kubelet/pods/3d379718-8ee6-4e42-8d61-01f74beb9e0a/volumes" Oct 01 16:30:05 crc kubenswrapper[4869]: I1001 16:30:05.774428 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-ls522" Oct 01 16:30:05 crc kubenswrapper[4869]: I1001 16:30:05.775771 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-ls522" Oct 01 16:30:06 crc kubenswrapper[4869]: I1001 16:30:06.823351 4869 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-ls522" podUID="f297cbfe-1e49-4afc-8fe5-9a951f59163c" containerName="registry-server" probeResult="failure" output=< Oct 01 16:30:06 crc kubenswrapper[4869]: timeout: failed to connect service ":50051" within 1s Oct 01 16:30:06 crc kubenswrapper[4869]: > Oct 01 16:30:08 crc kubenswrapper[4869]: I1001 16:30:08.581203 4869 scope.go:117] "RemoveContainer" containerID="0eaa53cd5d569b0e6bef9edefc7d1fa07489655924f7f785aa79773af28d727f" Oct 01 16:30:08 crc kubenswrapper[4869]: E1001 16:30:08.581903 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:30:15 crc kubenswrapper[4869]: I1001 16:30:15.848975 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-ls522" Oct 01 16:30:15 crc kubenswrapper[4869]: I1001 16:30:15.905654 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-ls522" Oct 01 16:30:16 crc kubenswrapper[4869]: I1001 16:30:16.094397 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-ls522"] Oct 01 16:30:17 crc kubenswrapper[4869]: I1001 16:30:17.004842 4869 scope.go:117] "RemoveContainer" containerID="93cb852400d436873f3210fc6f8f028e1ab9f3f05ad5d495f8c0fc8fdad210e7" Oct 01 16:30:17 crc kubenswrapper[4869]: I1001 16:30:17.245005 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-ls522" podUID="f297cbfe-1e49-4afc-8fe5-9a951f59163c" containerName="registry-server" containerID="cri-o://75de49fb06d78ea85f12de12249bb045225737870644567a5bbd70e434ce797d" gracePeriod=2 Oct 01 16:30:17 crc kubenswrapper[4869]: I1001 16:30:17.840783 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-ls522" Oct 01 16:30:17 crc kubenswrapper[4869]: I1001 16:30:17.892891 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5b54q\" (UniqueName: \"kubernetes.io/projected/f297cbfe-1e49-4afc-8fe5-9a951f59163c-kube-api-access-5b54q\") pod \"f297cbfe-1e49-4afc-8fe5-9a951f59163c\" (UID: \"f297cbfe-1e49-4afc-8fe5-9a951f59163c\") " Oct 01 16:30:17 crc kubenswrapper[4869]: I1001 16:30:17.893037 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f297cbfe-1e49-4afc-8fe5-9a951f59163c-catalog-content\") pod \"f297cbfe-1e49-4afc-8fe5-9a951f59163c\" (UID: \"f297cbfe-1e49-4afc-8fe5-9a951f59163c\") " Oct 01 16:30:17 crc kubenswrapper[4869]: I1001 16:30:17.893132 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f297cbfe-1e49-4afc-8fe5-9a951f59163c-utilities\") pod \"f297cbfe-1e49-4afc-8fe5-9a951f59163c\" (UID: \"f297cbfe-1e49-4afc-8fe5-9a951f59163c\") " Oct 01 16:30:17 crc kubenswrapper[4869]: I1001 16:30:17.893876 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f297cbfe-1e49-4afc-8fe5-9a951f59163c-utilities" (OuterVolumeSpecName: "utilities") pod "f297cbfe-1e49-4afc-8fe5-9a951f59163c" (UID: "f297cbfe-1e49-4afc-8fe5-9a951f59163c"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 16:30:17 crc kubenswrapper[4869]: I1001 16:30:17.894226 4869 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f297cbfe-1e49-4afc-8fe5-9a951f59163c-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 16:30:17 crc kubenswrapper[4869]: I1001 16:30:17.919549 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f297cbfe-1e49-4afc-8fe5-9a951f59163c-kube-api-access-5b54q" (OuterVolumeSpecName: "kube-api-access-5b54q") pod "f297cbfe-1e49-4afc-8fe5-9a951f59163c" (UID: "f297cbfe-1e49-4afc-8fe5-9a951f59163c"). InnerVolumeSpecName "kube-api-access-5b54q". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 16:30:17 crc kubenswrapper[4869]: I1001 16:30:17.941153 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f297cbfe-1e49-4afc-8fe5-9a951f59163c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f297cbfe-1e49-4afc-8fe5-9a951f59163c" (UID: "f297cbfe-1e49-4afc-8fe5-9a951f59163c"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 16:30:17 crc kubenswrapper[4869]: I1001 16:30:17.996073 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5b54q\" (UniqueName: \"kubernetes.io/projected/f297cbfe-1e49-4afc-8fe5-9a951f59163c-kube-api-access-5b54q\") on node \"crc\" DevicePath \"\"" Oct 01 16:30:17 crc kubenswrapper[4869]: I1001 16:30:17.996127 4869 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f297cbfe-1e49-4afc-8fe5-9a951f59163c-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 16:30:18 crc kubenswrapper[4869]: I1001 16:30:18.256403 4869 generic.go:334] "Generic (PLEG): container finished" podID="f297cbfe-1e49-4afc-8fe5-9a951f59163c" containerID="75de49fb06d78ea85f12de12249bb045225737870644567a5bbd70e434ce797d" exitCode=0 Oct 01 16:30:18 crc kubenswrapper[4869]: I1001 16:30:18.256488 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-ls522" Oct 01 16:30:18 crc kubenswrapper[4869]: I1001 16:30:18.256510 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ls522" event={"ID":"f297cbfe-1e49-4afc-8fe5-9a951f59163c","Type":"ContainerDied","Data":"75de49fb06d78ea85f12de12249bb045225737870644567a5bbd70e434ce797d"} Oct 01 16:30:18 crc kubenswrapper[4869]: I1001 16:30:18.256768 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ls522" event={"ID":"f297cbfe-1e49-4afc-8fe5-9a951f59163c","Type":"ContainerDied","Data":"57a0e8133990f8ceef6eb8de0e4960660bd79fa980db5a0b23a50a94232bf809"} Oct 01 16:30:18 crc kubenswrapper[4869]: I1001 16:30:18.256789 4869 scope.go:117] "RemoveContainer" containerID="75de49fb06d78ea85f12de12249bb045225737870644567a5bbd70e434ce797d" Oct 01 16:30:18 crc kubenswrapper[4869]: I1001 16:30:18.289016 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-ls522"] Oct 01 16:30:18 crc kubenswrapper[4869]: I1001 16:30:18.289771 4869 scope.go:117] "RemoveContainer" containerID="3047dd67ec98b4c74db5d62393f6062431b482a65c6ec358a88c0a0b8049f08f" Oct 01 16:30:18 crc kubenswrapper[4869]: I1001 16:30:18.296452 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-ls522"] Oct 01 16:30:18 crc kubenswrapper[4869]: I1001 16:30:18.335707 4869 scope.go:117] "RemoveContainer" containerID="9836c11a8e6f7b2fe089cbb6d0de1f2ccac08df3ae2ee3c7c6a306610ca8611e" Oct 01 16:30:18 crc kubenswrapper[4869]: I1001 16:30:18.377610 4869 scope.go:117] "RemoveContainer" containerID="75de49fb06d78ea85f12de12249bb045225737870644567a5bbd70e434ce797d" Oct 01 16:30:18 crc kubenswrapper[4869]: E1001 16:30:18.378119 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"75de49fb06d78ea85f12de12249bb045225737870644567a5bbd70e434ce797d\": container with ID starting with 75de49fb06d78ea85f12de12249bb045225737870644567a5bbd70e434ce797d not found: ID does not exist" containerID="75de49fb06d78ea85f12de12249bb045225737870644567a5bbd70e434ce797d" Oct 01 16:30:18 crc kubenswrapper[4869]: I1001 16:30:18.378156 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"75de49fb06d78ea85f12de12249bb045225737870644567a5bbd70e434ce797d"} err="failed to get container status \"75de49fb06d78ea85f12de12249bb045225737870644567a5bbd70e434ce797d\": rpc error: code = NotFound desc = could not find container \"75de49fb06d78ea85f12de12249bb045225737870644567a5bbd70e434ce797d\": container with ID starting with 75de49fb06d78ea85f12de12249bb045225737870644567a5bbd70e434ce797d not found: ID does not exist" Oct 01 16:30:18 crc kubenswrapper[4869]: I1001 16:30:18.378181 4869 scope.go:117] "RemoveContainer" containerID="3047dd67ec98b4c74db5d62393f6062431b482a65c6ec358a88c0a0b8049f08f" Oct 01 16:30:18 crc kubenswrapper[4869]: E1001 16:30:18.378771 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3047dd67ec98b4c74db5d62393f6062431b482a65c6ec358a88c0a0b8049f08f\": container with ID starting with 3047dd67ec98b4c74db5d62393f6062431b482a65c6ec358a88c0a0b8049f08f not found: ID does not exist" containerID="3047dd67ec98b4c74db5d62393f6062431b482a65c6ec358a88c0a0b8049f08f" Oct 01 16:30:18 crc kubenswrapper[4869]: I1001 16:30:18.378809 4869 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3047dd67ec98b4c74db5d62393f6062431b482a65c6ec358a88c0a0b8049f08f"} err="failed to get container status \"3047dd67ec98b4c74db5d62393f6062431b482a65c6ec358a88c0a0b8049f08f\": rpc error: code = NotFound desc = could not find container \"3047dd67ec98b4c74db5d62393f6062431b482a65c6ec358a88c0a0b8049f08f\": container with ID starting with 3047dd67ec98b4c74db5d62393f6062431b482a65c6ec358a88c0a0b8049f08f not found: ID does not exist" Oct 01 16:30:18 crc kubenswrapper[4869]: I1001 16:30:18.378852 4869 scope.go:117] "RemoveContainer" containerID="9836c11a8e6f7b2fe089cbb6d0de1f2ccac08df3ae2ee3c7c6a306610ca8611e" Oct 01 16:30:18 crc kubenswrapper[4869]: E1001 16:30:18.379101 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9836c11a8e6f7b2fe089cbb6d0de1f2ccac08df3ae2ee3c7c6a306610ca8611e\": container with ID starting with 9836c11a8e6f7b2fe089cbb6d0de1f2ccac08df3ae2ee3c7c6a306610ca8611e not found: ID does not exist" containerID="9836c11a8e6f7b2fe089cbb6d0de1f2ccac08df3ae2ee3c7c6a306610ca8611e" Oct 01 16:30:18 crc kubenswrapper[4869]: I1001 16:30:18.379135 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9836c11a8e6f7b2fe089cbb6d0de1f2ccac08df3ae2ee3c7c6a306610ca8611e"} err="failed to get container status \"9836c11a8e6f7b2fe089cbb6d0de1f2ccac08df3ae2ee3c7c6a306610ca8611e\": rpc error: code = NotFound desc = could not find container \"9836c11a8e6f7b2fe089cbb6d0de1f2ccac08df3ae2ee3c7c6a306610ca8611e\": container with ID starting with 9836c11a8e6f7b2fe089cbb6d0de1f2ccac08df3ae2ee3c7c6a306610ca8611e not found: ID does not exist" Oct 01 16:30:19 crc kubenswrapper[4869]: I1001 16:30:19.595676 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f297cbfe-1e49-4afc-8fe5-9a951f59163c" path="/var/lib/kubelet/pods/f297cbfe-1e49-4afc-8fe5-9a951f59163c/volumes" Oct 01 16:30:23 crc kubenswrapper[4869]: I1001 16:30:23.581776 4869 scope.go:117] "RemoveContainer" containerID="0eaa53cd5d569b0e6bef9edefc7d1fa07489655924f7f785aa79773af28d727f" Oct 01 16:30:23 crc kubenswrapper[4869]: E1001 16:30:23.582906 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:30:37 crc kubenswrapper[4869]: I1001 16:30:37.581853 4869 scope.go:117] "RemoveContainer" containerID="0eaa53cd5d569b0e6bef9edefc7d1fa07489655924f7f785aa79773af28d727f" Oct 01 16:30:37 crc kubenswrapper[4869]: E1001 16:30:37.583024 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:30:50 crc kubenswrapper[4869]: I1001 16:30:50.581168 4869 scope.go:117] "RemoveContainer" containerID="0eaa53cd5d569b0e6bef9edefc7d1fa07489655924f7f785aa79773af28d727f" Oct 01 16:30:50 crc 
kubenswrapper[4869]: E1001 16:30:50.582168 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:31:04 crc kubenswrapper[4869]: I1001 16:31:04.581937 4869 scope.go:117] "RemoveContainer" containerID="0eaa53cd5d569b0e6bef9edefc7d1fa07489655924f7f785aa79773af28d727f" Oct 01 16:31:04 crc kubenswrapper[4869]: E1001 16:31:04.582621 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:31:16 crc kubenswrapper[4869]: I1001 16:31:16.581470 4869 scope.go:117] "RemoveContainer" containerID="0eaa53cd5d569b0e6bef9edefc7d1fa07489655924f7f785aa79773af28d727f" Oct 01 16:31:16 crc kubenswrapper[4869]: E1001 16:31:16.582246 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:31:28 crc kubenswrapper[4869]: I1001 16:31:28.582930 4869 scope.go:117] "RemoveContainer" containerID="0eaa53cd5d569b0e6bef9edefc7d1fa07489655924f7f785aa79773af28d727f" Oct 01 16:31:28 crc kubenswrapper[4869]: E1001 16:31:28.584442 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:31:39 crc kubenswrapper[4869]: I1001 16:31:39.582068 4869 scope.go:117] "RemoveContainer" containerID="0eaa53cd5d569b0e6bef9edefc7d1fa07489655924f7f785aa79773af28d727f" Oct 01 16:31:39 crc kubenswrapper[4869]: E1001 16:31:39.583455 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:31:51 crc kubenswrapper[4869]: I1001 16:31:51.598790 4869 scope.go:117] "RemoveContainer" containerID="0eaa53cd5d569b0e6bef9edefc7d1fa07489655924f7f785aa79773af28d727f" Oct 01 16:31:51 crc kubenswrapper[4869]: E1001 16:31:51.599994 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: 
\"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:32:03 crc kubenswrapper[4869]: I1001 16:32:03.582508 4869 scope.go:117] "RemoveContainer" containerID="0eaa53cd5d569b0e6bef9edefc7d1fa07489655924f7f785aa79773af28d727f" Oct 01 16:32:03 crc kubenswrapper[4869]: E1001 16:32:03.583197 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:32:18 crc kubenswrapper[4869]: I1001 16:32:18.581845 4869 scope.go:117] "RemoveContainer" containerID="0eaa53cd5d569b0e6bef9edefc7d1fa07489655924f7f785aa79773af28d727f" Oct 01 16:32:19 crc kubenswrapper[4869]: I1001 16:32:19.422377 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" event={"ID":"a4b64f7f-0b03-4f47-965b-9fde048b735c","Type":"ContainerStarted","Data":"c121385614507939bd4b0233bfac1b4b5096c8ffc9330ccf589f609e0b1c878a"} Oct 01 16:33:09 crc kubenswrapper[4869]: I1001 16:33:09.783146 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-f4m9s"] Oct 01 16:33:09 crc kubenswrapper[4869]: E1001 16:33:09.784175 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f297cbfe-1e49-4afc-8fe5-9a951f59163c" containerName="extract-utilities" Oct 01 16:33:09 crc kubenswrapper[4869]: I1001 16:33:09.784191 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="f297cbfe-1e49-4afc-8fe5-9a951f59163c" containerName="extract-utilities" Oct 01 16:33:09 crc kubenswrapper[4869]: E1001 16:33:09.784214 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f297cbfe-1e49-4afc-8fe5-9a951f59163c" containerName="extract-content" Oct 01 16:33:09 crc kubenswrapper[4869]: I1001 16:33:09.784220 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="f297cbfe-1e49-4afc-8fe5-9a951f59163c" containerName="extract-content" Oct 01 16:33:09 crc kubenswrapper[4869]: E1001 16:33:09.784252 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f297cbfe-1e49-4afc-8fe5-9a951f59163c" containerName="registry-server" Oct 01 16:33:09 crc kubenswrapper[4869]: I1001 16:33:09.784279 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="f297cbfe-1e49-4afc-8fe5-9a951f59163c" containerName="registry-server" Oct 01 16:33:09 crc kubenswrapper[4869]: E1001 16:33:09.784298 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="495ca456-ea2e-47b7-9b98-307bc08df870" containerName="collect-profiles" Oct 01 16:33:09 crc kubenswrapper[4869]: I1001 16:33:09.784305 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="495ca456-ea2e-47b7-9b98-307bc08df870" containerName="collect-profiles" Oct 01 16:33:09 crc kubenswrapper[4869]: I1001 16:33:09.784529 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="f297cbfe-1e49-4afc-8fe5-9a951f59163c" containerName="registry-server" Oct 01 16:33:09 crc kubenswrapper[4869]: I1001 16:33:09.784552 4869 memory_manager.go:354] "RemoveStaleState 
removing state" podUID="495ca456-ea2e-47b7-9b98-307bc08df870" containerName="collect-profiles" Oct 01 16:33:09 crc kubenswrapper[4869]: I1001 16:33:09.786399 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-f4m9s" Oct 01 16:33:09 crc kubenswrapper[4869]: I1001 16:33:09.799080 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-f4m9s"] Oct 01 16:33:09 crc kubenswrapper[4869]: I1001 16:33:09.855568 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b8pct\" (UniqueName: \"kubernetes.io/projected/894a5177-f062-4912-83bd-56783e2dcc11-kube-api-access-b8pct\") pod \"redhat-operators-f4m9s\" (UID: \"894a5177-f062-4912-83bd-56783e2dcc11\") " pod="openshift-marketplace/redhat-operators-f4m9s" Oct 01 16:33:09 crc kubenswrapper[4869]: I1001 16:33:09.855730 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/894a5177-f062-4912-83bd-56783e2dcc11-utilities\") pod \"redhat-operators-f4m9s\" (UID: \"894a5177-f062-4912-83bd-56783e2dcc11\") " pod="openshift-marketplace/redhat-operators-f4m9s" Oct 01 16:33:09 crc kubenswrapper[4869]: I1001 16:33:09.855793 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/894a5177-f062-4912-83bd-56783e2dcc11-catalog-content\") pod \"redhat-operators-f4m9s\" (UID: \"894a5177-f062-4912-83bd-56783e2dcc11\") " pod="openshift-marketplace/redhat-operators-f4m9s" Oct 01 16:33:09 crc kubenswrapper[4869]: I1001 16:33:09.957949 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/894a5177-f062-4912-83bd-56783e2dcc11-utilities\") pod \"redhat-operators-f4m9s\" (UID: \"894a5177-f062-4912-83bd-56783e2dcc11\") " pod="openshift-marketplace/redhat-operators-f4m9s" Oct 01 16:33:09 crc kubenswrapper[4869]: I1001 16:33:09.958053 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/894a5177-f062-4912-83bd-56783e2dcc11-catalog-content\") pod \"redhat-operators-f4m9s\" (UID: \"894a5177-f062-4912-83bd-56783e2dcc11\") " pod="openshift-marketplace/redhat-operators-f4m9s" Oct 01 16:33:09 crc kubenswrapper[4869]: I1001 16:33:09.958158 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b8pct\" (UniqueName: \"kubernetes.io/projected/894a5177-f062-4912-83bd-56783e2dcc11-kube-api-access-b8pct\") pod \"redhat-operators-f4m9s\" (UID: \"894a5177-f062-4912-83bd-56783e2dcc11\") " pod="openshift-marketplace/redhat-operators-f4m9s" Oct 01 16:33:09 crc kubenswrapper[4869]: I1001 16:33:09.958534 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/894a5177-f062-4912-83bd-56783e2dcc11-utilities\") pod \"redhat-operators-f4m9s\" (UID: \"894a5177-f062-4912-83bd-56783e2dcc11\") " pod="openshift-marketplace/redhat-operators-f4m9s" Oct 01 16:33:09 crc kubenswrapper[4869]: I1001 16:33:09.958674 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/894a5177-f062-4912-83bd-56783e2dcc11-catalog-content\") pod \"redhat-operators-f4m9s\" (UID: 
\"894a5177-f062-4912-83bd-56783e2dcc11\") " pod="openshift-marketplace/redhat-operators-f4m9s" Oct 01 16:33:09 crc kubenswrapper[4869]: I1001 16:33:09.981372 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b8pct\" (UniqueName: \"kubernetes.io/projected/894a5177-f062-4912-83bd-56783e2dcc11-kube-api-access-b8pct\") pod \"redhat-operators-f4m9s\" (UID: \"894a5177-f062-4912-83bd-56783e2dcc11\") " pod="openshift-marketplace/redhat-operators-f4m9s" Oct 01 16:33:10 crc kubenswrapper[4869]: I1001 16:33:10.113227 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-f4m9s" Oct 01 16:33:10 crc kubenswrapper[4869]: I1001 16:33:10.629962 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-f4m9s"] Oct 01 16:33:10 crc kubenswrapper[4869]: I1001 16:33:10.891508 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-f4m9s" event={"ID":"894a5177-f062-4912-83bd-56783e2dcc11","Type":"ContainerStarted","Data":"518b0b01a067c9db4367534c5cd58744f76c60b8afab615f46f4b5c374925664"} Oct 01 16:33:11 crc kubenswrapper[4869]: I1001 16:33:11.914733 4869 generic.go:334] "Generic (PLEG): container finished" podID="894a5177-f062-4912-83bd-56783e2dcc11" containerID="b149bfb730f4d70df386265133db696fe33fa349f10350252e7c4c06b055a689" exitCode=0 Oct 01 16:33:11 crc kubenswrapper[4869]: I1001 16:33:11.915038 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-f4m9s" event={"ID":"894a5177-f062-4912-83bd-56783e2dcc11","Type":"ContainerDied","Data":"b149bfb730f4d70df386265133db696fe33fa349f10350252e7c4c06b055a689"} Oct 01 16:33:11 crc kubenswrapper[4869]: I1001 16:33:11.925561 4869 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 01 16:33:22 crc kubenswrapper[4869]: I1001 16:33:22.020732 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-f4m9s" event={"ID":"894a5177-f062-4912-83bd-56783e2dcc11","Type":"ContainerStarted","Data":"5c9cab60d7cd09e8b0c7354ce76b7fb118292f1b252aa5c6375a60dfcc5263b2"} Oct 01 16:33:23 crc kubenswrapper[4869]: I1001 16:33:23.031228 4869 generic.go:334] "Generic (PLEG): container finished" podID="894a5177-f062-4912-83bd-56783e2dcc11" containerID="5c9cab60d7cd09e8b0c7354ce76b7fb118292f1b252aa5c6375a60dfcc5263b2" exitCode=0 Oct 01 16:33:23 crc kubenswrapper[4869]: I1001 16:33:23.031293 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-f4m9s" event={"ID":"894a5177-f062-4912-83bd-56783e2dcc11","Type":"ContainerDied","Data":"5c9cab60d7cd09e8b0c7354ce76b7fb118292f1b252aa5c6375a60dfcc5263b2"} Oct 01 16:33:27 crc kubenswrapper[4869]: I1001 16:33:27.073693 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-f4m9s" event={"ID":"894a5177-f062-4912-83bd-56783e2dcc11","Type":"ContainerStarted","Data":"d3b119c4f1190532588772c23735ca061fd6af337a28d7650e54f66d441b3335"} Oct 01 16:33:27 crc kubenswrapper[4869]: I1001 16:33:27.093595 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-f4m9s" podStartSLOduration=3.683508288 podStartE2EDuration="18.093579932s" podCreationTimestamp="2025-10-01 16:33:09 +0000 UTC" firstStartedPulling="2025-10-01 16:33:11.922571201 +0000 UTC m=+5301.069414347" lastFinishedPulling="2025-10-01 
16:33:26.332642855 +0000 UTC m=+5315.479485991" observedRunningTime="2025-10-01 16:33:27.088524544 +0000 UTC m=+5316.235367660" watchObservedRunningTime="2025-10-01 16:33:27.093579932 +0000 UTC m=+5316.240423048" Oct 01 16:33:30 crc kubenswrapper[4869]: I1001 16:33:30.114089 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-f4m9s" Oct 01 16:33:30 crc kubenswrapper[4869]: I1001 16:33:30.115697 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-f4m9s" Oct 01 16:33:31 crc kubenswrapper[4869]: I1001 16:33:31.160767 4869 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-f4m9s" podUID="894a5177-f062-4912-83bd-56783e2dcc11" containerName="registry-server" probeResult="failure" output=< Oct 01 16:33:31 crc kubenswrapper[4869]: timeout: failed to connect service ":50051" within 1s Oct 01 16:33:31 crc kubenswrapper[4869]: > Oct 01 16:33:41 crc kubenswrapper[4869]: I1001 16:33:41.162558 4869 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-f4m9s" podUID="894a5177-f062-4912-83bd-56783e2dcc11" containerName="registry-server" probeResult="failure" output=< Oct 01 16:33:41 crc kubenswrapper[4869]: timeout: failed to connect service ":50051" within 1s Oct 01 16:33:41 crc kubenswrapper[4869]: > Oct 01 16:33:50 crc kubenswrapper[4869]: I1001 16:33:50.185835 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-f4m9s" Oct 01 16:33:50 crc kubenswrapper[4869]: I1001 16:33:50.236512 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-f4m9s" Oct 01 16:33:50 crc kubenswrapper[4869]: I1001 16:33:50.298716 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-f4m9s"] Oct 01 16:33:50 crc kubenswrapper[4869]: I1001 16:33:50.359059 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-gxmfx"] Oct 01 16:33:50 crc kubenswrapper[4869]: I1001 16:33:50.359629 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-gxmfx" podUID="0861897c-f37b-416a-bc83-7e72df955845" containerName="registry-server" containerID="cri-o://8b02d2e7efc22a7ca238dd01481c487226251046d26d0d2a6b86039b9d896945" gracePeriod=2 Oct 01 16:33:50 crc kubenswrapper[4869]: I1001 16:33:50.950810 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-gxmfx" Oct 01 16:33:51 crc kubenswrapper[4869]: I1001 16:33:51.035813 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0861897c-f37b-416a-bc83-7e72df955845-catalog-content\") pod \"0861897c-f37b-416a-bc83-7e72df955845\" (UID: \"0861897c-f37b-416a-bc83-7e72df955845\") " Oct 01 16:33:51 crc kubenswrapper[4869]: I1001 16:33:51.036185 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0861897c-f37b-416a-bc83-7e72df955845-utilities\") pod \"0861897c-f37b-416a-bc83-7e72df955845\" (UID: \"0861897c-f37b-416a-bc83-7e72df955845\") " Oct 01 16:33:51 crc kubenswrapper[4869]: I1001 16:33:51.036347 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n7pbm\" (UniqueName: \"kubernetes.io/projected/0861897c-f37b-416a-bc83-7e72df955845-kube-api-access-n7pbm\") pod \"0861897c-f37b-416a-bc83-7e72df955845\" (UID: \"0861897c-f37b-416a-bc83-7e72df955845\") " Oct 01 16:33:51 crc kubenswrapper[4869]: I1001 16:33:51.037002 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0861897c-f37b-416a-bc83-7e72df955845-utilities" (OuterVolumeSpecName: "utilities") pod "0861897c-f37b-416a-bc83-7e72df955845" (UID: "0861897c-f37b-416a-bc83-7e72df955845"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 16:33:51 crc kubenswrapper[4869]: I1001 16:33:51.045225 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0861897c-f37b-416a-bc83-7e72df955845-kube-api-access-n7pbm" (OuterVolumeSpecName: "kube-api-access-n7pbm") pod "0861897c-f37b-416a-bc83-7e72df955845" (UID: "0861897c-f37b-416a-bc83-7e72df955845"). InnerVolumeSpecName "kube-api-access-n7pbm". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 16:33:51 crc kubenswrapper[4869]: I1001 16:33:51.118464 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0861897c-f37b-416a-bc83-7e72df955845-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0861897c-f37b-416a-bc83-7e72df955845" (UID: "0861897c-f37b-416a-bc83-7e72df955845"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 16:33:51 crc kubenswrapper[4869]: I1001 16:33:51.140735 4869 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0861897c-f37b-416a-bc83-7e72df955845-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 16:33:51 crc kubenswrapper[4869]: I1001 16:33:51.140770 4869 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0861897c-f37b-416a-bc83-7e72df955845-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 16:33:51 crc kubenswrapper[4869]: I1001 16:33:51.140780 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n7pbm\" (UniqueName: \"kubernetes.io/projected/0861897c-f37b-416a-bc83-7e72df955845-kube-api-access-n7pbm\") on node \"crc\" DevicePath \"\"" Oct 01 16:33:51 crc kubenswrapper[4869]: I1001 16:33:51.309500 4869 generic.go:334] "Generic (PLEG): container finished" podID="0861897c-f37b-416a-bc83-7e72df955845" containerID="8b02d2e7efc22a7ca238dd01481c487226251046d26d0d2a6b86039b9d896945" exitCode=0 Oct 01 16:33:51 crc kubenswrapper[4869]: I1001 16:33:51.309561 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-gxmfx" Oct 01 16:33:51 crc kubenswrapper[4869]: I1001 16:33:51.309592 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gxmfx" event={"ID":"0861897c-f37b-416a-bc83-7e72df955845","Type":"ContainerDied","Data":"8b02d2e7efc22a7ca238dd01481c487226251046d26d0d2a6b86039b9d896945"} Oct 01 16:33:51 crc kubenswrapper[4869]: I1001 16:33:51.309647 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gxmfx" event={"ID":"0861897c-f37b-416a-bc83-7e72df955845","Type":"ContainerDied","Data":"cd2915ee2e41bba3d00154e21b865bb63bd1e1ff9309650435d93a0885d7b29d"} Oct 01 16:33:51 crc kubenswrapper[4869]: I1001 16:33:51.309669 4869 scope.go:117] "RemoveContainer" containerID="8b02d2e7efc22a7ca238dd01481c487226251046d26d0d2a6b86039b9d896945" Oct 01 16:33:51 crc kubenswrapper[4869]: I1001 16:33:51.347551 4869 scope.go:117] "RemoveContainer" containerID="9cc41a569e1847ccd85cd0808202a104049ea59b261f399314d0b40679fdfe2f" Oct 01 16:33:51 crc kubenswrapper[4869]: I1001 16:33:51.350315 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-gxmfx"] Oct 01 16:33:51 crc kubenswrapper[4869]: I1001 16:33:51.379205 4869 scope.go:117] "RemoveContainer" containerID="a6e80d5c9601c239e4f3f09eb993e3b5f21f021b97bd8ee74061753dc6651872" Oct 01 16:33:51 crc kubenswrapper[4869]: I1001 16:33:51.382487 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-gxmfx"] Oct 01 16:33:51 crc kubenswrapper[4869]: I1001 16:33:51.421256 4869 scope.go:117] "RemoveContainer" containerID="8b02d2e7efc22a7ca238dd01481c487226251046d26d0d2a6b86039b9d896945" Oct 01 16:33:51 crc kubenswrapper[4869]: E1001 16:33:51.421837 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8b02d2e7efc22a7ca238dd01481c487226251046d26d0d2a6b86039b9d896945\": container with ID starting with 8b02d2e7efc22a7ca238dd01481c487226251046d26d0d2a6b86039b9d896945 not found: ID does not exist" containerID="8b02d2e7efc22a7ca238dd01481c487226251046d26d0d2a6b86039b9d896945" Oct 01 16:33:51 crc kubenswrapper[4869]: I1001 16:33:51.421899 4869 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8b02d2e7efc22a7ca238dd01481c487226251046d26d0d2a6b86039b9d896945"} err="failed to get container status \"8b02d2e7efc22a7ca238dd01481c487226251046d26d0d2a6b86039b9d896945\": rpc error: code = NotFound desc = could not find container \"8b02d2e7efc22a7ca238dd01481c487226251046d26d0d2a6b86039b9d896945\": container with ID starting with 8b02d2e7efc22a7ca238dd01481c487226251046d26d0d2a6b86039b9d896945 not found: ID does not exist" Oct 01 16:33:51 crc kubenswrapper[4869]: I1001 16:33:51.421927 4869 scope.go:117] "RemoveContainer" containerID="9cc41a569e1847ccd85cd0808202a104049ea59b261f399314d0b40679fdfe2f" Oct 01 16:33:51 crc kubenswrapper[4869]: E1001 16:33:51.422861 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9cc41a569e1847ccd85cd0808202a104049ea59b261f399314d0b40679fdfe2f\": container with ID starting with 9cc41a569e1847ccd85cd0808202a104049ea59b261f399314d0b40679fdfe2f not found: ID does not exist" containerID="9cc41a569e1847ccd85cd0808202a104049ea59b261f399314d0b40679fdfe2f" Oct 01 16:33:51 crc kubenswrapper[4869]: I1001 16:33:51.422918 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9cc41a569e1847ccd85cd0808202a104049ea59b261f399314d0b40679fdfe2f"} err="failed to get container status \"9cc41a569e1847ccd85cd0808202a104049ea59b261f399314d0b40679fdfe2f\": rpc error: code = NotFound desc = could not find container \"9cc41a569e1847ccd85cd0808202a104049ea59b261f399314d0b40679fdfe2f\": container with ID starting with 9cc41a569e1847ccd85cd0808202a104049ea59b261f399314d0b40679fdfe2f not found: ID does not exist" Oct 01 16:33:51 crc kubenswrapper[4869]: I1001 16:33:51.422969 4869 scope.go:117] "RemoveContainer" containerID="a6e80d5c9601c239e4f3f09eb993e3b5f21f021b97bd8ee74061753dc6651872" Oct 01 16:33:51 crc kubenswrapper[4869]: E1001 16:33:51.423291 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a6e80d5c9601c239e4f3f09eb993e3b5f21f021b97bd8ee74061753dc6651872\": container with ID starting with a6e80d5c9601c239e4f3f09eb993e3b5f21f021b97bd8ee74061753dc6651872 not found: ID does not exist" containerID="a6e80d5c9601c239e4f3f09eb993e3b5f21f021b97bd8ee74061753dc6651872" Oct 01 16:33:51 crc kubenswrapper[4869]: I1001 16:33:51.423338 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a6e80d5c9601c239e4f3f09eb993e3b5f21f021b97bd8ee74061753dc6651872"} err="failed to get container status \"a6e80d5c9601c239e4f3f09eb993e3b5f21f021b97bd8ee74061753dc6651872\": rpc error: code = NotFound desc = could not find container \"a6e80d5c9601c239e4f3f09eb993e3b5f21f021b97bd8ee74061753dc6651872\": container with ID starting with a6e80d5c9601c239e4f3f09eb993e3b5f21f021b97bd8ee74061753dc6651872 not found: ID does not exist" Oct 01 16:33:51 crc kubenswrapper[4869]: I1001 16:33:51.596016 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0861897c-f37b-416a-bc83-7e72df955845" path="/var/lib/kubelet/pods/0861897c-f37b-416a-bc83-7e72df955845/volumes" Oct 01 16:33:52 crc kubenswrapper[4869]: I1001 16:33:52.716472 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c6l949"] Oct 01 16:33:52 crc kubenswrapper[4869]: E1001 16:33:52.718022 4869 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="0861897c-f37b-416a-bc83-7e72df955845" containerName="extract-utilities" Oct 01 16:33:52 crc kubenswrapper[4869]: I1001 16:33:52.718301 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="0861897c-f37b-416a-bc83-7e72df955845" containerName="extract-utilities" Oct 01 16:33:52 crc kubenswrapper[4869]: E1001 16:33:52.718402 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0861897c-f37b-416a-bc83-7e72df955845" containerName="registry-server" Oct 01 16:33:52 crc kubenswrapper[4869]: I1001 16:33:52.718458 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="0861897c-f37b-416a-bc83-7e72df955845" containerName="registry-server" Oct 01 16:33:52 crc kubenswrapper[4869]: E1001 16:33:52.718538 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0861897c-f37b-416a-bc83-7e72df955845" containerName="extract-content" Oct 01 16:33:52 crc kubenswrapper[4869]: I1001 16:33:52.718603 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="0861897c-f37b-416a-bc83-7e72df955845" containerName="extract-content" Oct 01 16:33:52 crc kubenswrapper[4869]: I1001 16:33:52.718863 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="0861897c-f37b-416a-bc83-7e72df955845" containerName="registry-server" Oct 01 16:33:52 crc kubenswrapper[4869]: I1001 16:33:52.720307 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c6l949" Oct 01 16:33:52 crc kubenswrapper[4869]: I1001 16:33:52.726512 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c6l949"] Oct 01 16:33:52 crc kubenswrapper[4869]: I1001 16:33:52.727164 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Oct 01 16:33:52 crc kubenswrapper[4869]: I1001 16:33:52.770076 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jmfl5\" (UniqueName: \"kubernetes.io/projected/2d7aa34b-5f2f-4a92-95d8-4a4778a4c3be-kube-api-access-jmfl5\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c6l949\" (UID: \"2d7aa34b-5f2f-4a92-95d8-4a4778a4c3be\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c6l949" Oct 01 16:33:52 crc kubenswrapper[4869]: I1001 16:33:52.770168 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/2d7aa34b-5f2f-4a92-95d8-4a4778a4c3be-bundle\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c6l949\" (UID: \"2d7aa34b-5f2f-4a92-95d8-4a4778a4c3be\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c6l949" Oct 01 16:33:52 crc kubenswrapper[4869]: I1001 16:33:52.770189 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/2d7aa34b-5f2f-4a92-95d8-4a4778a4c3be-util\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c6l949\" (UID: \"2d7aa34b-5f2f-4a92-95d8-4a4778a4c3be\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c6l949" Oct 01 16:33:52 crc kubenswrapper[4869]: I1001 16:33:52.871938 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jmfl5\" 
(UniqueName: \"kubernetes.io/projected/2d7aa34b-5f2f-4a92-95d8-4a4778a4c3be-kube-api-access-jmfl5\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c6l949\" (UID: \"2d7aa34b-5f2f-4a92-95d8-4a4778a4c3be\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c6l949" Oct 01 16:33:52 crc kubenswrapper[4869]: I1001 16:33:52.872044 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/2d7aa34b-5f2f-4a92-95d8-4a4778a4c3be-bundle\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c6l949\" (UID: \"2d7aa34b-5f2f-4a92-95d8-4a4778a4c3be\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c6l949" Oct 01 16:33:52 crc kubenswrapper[4869]: I1001 16:33:52.872063 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/2d7aa34b-5f2f-4a92-95d8-4a4778a4c3be-util\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c6l949\" (UID: \"2d7aa34b-5f2f-4a92-95d8-4a4778a4c3be\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c6l949" Oct 01 16:33:52 crc kubenswrapper[4869]: I1001 16:33:52.872563 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/2d7aa34b-5f2f-4a92-95d8-4a4778a4c3be-util\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c6l949\" (UID: \"2d7aa34b-5f2f-4a92-95d8-4a4778a4c3be\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c6l949" Oct 01 16:33:52 crc kubenswrapper[4869]: I1001 16:33:52.872798 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/2d7aa34b-5f2f-4a92-95d8-4a4778a4c3be-bundle\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c6l949\" (UID: \"2d7aa34b-5f2f-4a92-95d8-4a4778a4c3be\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c6l949" Oct 01 16:33:52 crc kubenswrapper[4869]: I1001 16:33:52.894530 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jmfl5\" (UniqueName: \"kubernetes.io/projected/2d7aa34b-5f2f-4a92-95d8-4a4778a4c3be-kube-api-access-jmfl5\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c6l949\" (UID: \"2d7aa34b-5f2f-4a92-95d8-4a4778a4c3be\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c6l949" Oct 01 16:33:52 crc kubenswrapper[4869]: I1001 16:33:52.910614 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2tlppj"] Oct 01 16:33:52 crc kubenswrapper[4869]: I1001 16:33:52.912423 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2tlppj" Oct 01 16:33:52 crc kubenswrapper[4869]: I1001 16:33:52.921641 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2tlppj"] Oct 01 16:33:52 crc kubenswrapper[4869]: I1001 16:33:52.973698 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/0025b8f5-fdce-434f-a9c6-393fd4c93273-bundle\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2tlppj\" (UID: \"0025b8f5-fdce-434f-a9c6-393fd4c93273\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2tlppj" Oct 01 16:33:52 crc kubenswrapper[4869]: I1001 16:33:52.973948 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/0025b8f5-fdce-434f-a9c6-393fd4c93273-util\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2tlppj\" (UID: \"0025b8f5-fdce-434f-a9c6-393fd4c93273\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2tlppj" Oct 01 16:33:52 crc kubenswrapper[4869]: I1001 16:33:52.974185 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nvt7g\" (UniqueName: \"kubernetes.io/projected/0025b8f5-fdce-434f-a9c6-393fd4c93273-kube-api-access-nvt7g\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2tlppj\" (UID: \"0025b8f5-fdce-434f-a9c6-393fd4c93273\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2tlppj" Oct 01 16:33:53 crc kubenswrapper[4869]: I1001 16:33:53.040865 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c6l949" Oct 01 16:33:53 crc kubenswrapper[4869]: I1001 16:33:53.076599 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/0025b8f5-fdce-434f-a9c6-393fd4c93273-bundle\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2tlppj\" (UID: \"0025b8f5-fdce-434f-a9c6-393fd4c93273\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2tlppj" Oct 01 16:33:53 crc kubenswrapper[4869]: I1001 16:33:53.077079 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/0025b8f5-fdce-434f-a9c6-393fd4c93273-util\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2tlppj\" (UID: \"0025b8f5-fdce-434f-a9c6-393fd4c93273\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2tlppj" Oct 01 16:33:53 crc kubenswrapper[4869]: I1001 16:33:53.077356 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nvt7g\" (UniqueName: \"kubernetes.io/projected/0025b8f5-fdce-434f-a9c6-393fd4c93273-kube-api-access-nvt7g\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2tlppj\" (UID: \"0025b8f5-fdce-434f-a9c6-393fd4c93273\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2tlppj" Oct 01 16:33:53 crc kubenswrapper[4869]: I1001 16:33:53.077379 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/0025b8f5-fdce-434f-a9c6-393fd4c93273-bundle\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2tlppj\" (UID: \"0025b8f5-fdce-434f-a9c6-393fd4c93273\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2tlppj" Oct 01 16:33:53 crc kubenswrapper[4869]: I1001 16:33:53.077633 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/0025b8f5-fdce-434f-a9c6-393fd4c93273-util\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2tlppj\" (UID: \"0025b8f5-fdce-434f-a9c6-393fd4c93273\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2tlppj" Oct 01 16:33:53 crc kubenswrapper[4869]: I1001 16:33:53.096942 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nvt7g\" (UniqueName: \"kubernetes.io/projected/0025b8f5-fdce-434f-a9c6-393fd4c93273-kube-api-access-nvt7g\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2tlppj\" (UID: \"0025b8f5-fdce-434f-a9c6-393fd4c93273\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2tlppj" Oct 01 16:33:53 crc kubenswrapper[4869]: I1001 16:33:53.275032 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2tlppj" Oct 01 16:33:53 crc kubenswrapper[4869]: I1001 16:33:53.550334 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c6l949"] Oct 01 16:33:53 crc kubenswrapper[4869]: W1001 16:33:53.807895 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0025b8f5_fdce_434f_a9c6_393fd4c93273.slice/crio-f8776f403713331d9ca2d88d7eb8a717ddddae7054df7f2c71f65b4d97b9dad7 WatchSource:0}: Error finding container f8776f403713331d9ca2d88d7eb8a717ddddae7054df7f2c71f65b4d97b9dad7: Status 404 returned error can't find the container with id f8776f403713331d9ca2d88d7eb8a717ddddae7054df7f2c71f65b4d97b9dad7 Oct 01 16:33:53 crc kubenswrapper[4869]: I1001 16:33:53.808955 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2tlppj"] Oct 01 16:33:54 crc kubenswrapper[4869]: I1001 16:33:54.389170 4869 generic.go:334] "Generic (PLEG): container finished" podID="0025b8f5-fdce-434f-a9c6-393fd4c93273" containerID="ed4f282a9c860f77986bf64c61b77c0de0d9923d0233956d76c298a7e133607f" exitCode=0 Oct 01 16:33:54 crc kubenswrapper[4869]: I1001 16:33:54.389353 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2tlppj" event={"ID":"0025b8f5-fdce-434f-a9c6-393fd4c93273","Type":"ContainerDied","Data":"ed4f282a9c860f77986bf64c61b77c0de0d9923d0233956d76c298a7e133607f"} Oct 01 16:33:54 crc kubenswrapper[4869]: I1001 16:33:54.389447 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2tlppj" event={"ID":"0025b8f5-fdce-434f-a9c6-393fd4c93273","Type":"ContainerStarted","Data":"f8776f403713331d9ca2d88d7eb8a717ddddae7054df7f2c71f65b4d97b9dad7"} Oct 01 16:33:54 crc kubenswrapper[4869]: I1001 16:33:54.392558 4869 generic.go:334] "Generic (PLEG): container finished" podID="2d7aa34b-5f2f-4a92-95d8-4a4778a4c3be" containerID="40c0a0f29978d4bf42ce73991034901ef8e8ec860776bfbc0eb6c468de7e543e" exitCode=0 Oct 01 16:33:54 crc kubenswrapper[4869]: I1001 16:33:54.392598 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c6l949" event={"ID":"2d7aa34b-5f2f-4a92-95d8-4a4778a4c3be","Type":"ContainerDied","Data":"40c0a0f29978d4bf42ce73991034901ef8e8ec860776bfbc0eb6c468de7e543e"} Oct 01 16:33:54 crc kubenswrapper[4869]: I1001 16:33:54.392625 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c6l949" event={"ID":"2d7aa34b-5f2f-4a92-95d8-4a4778a4c3be","Type":"ContainerStarted","Data":"54967728b9c09fef33403868e0d7d5cda910fc707521971ea02fb8996eb5c344"} Oct 01 16:33:56 crc kubenswrapper[4869]: I1001 16:33:56.410597 4869 generic.go:334] "Generic (PLEG): container finished" podID="0025b8f5-fdce-434f-a9c6-393fd4c93273" containerID="d5a9552c00f2682530122786c5a31a5cccc582cca20af1deabdbf7d2180457ba" exitCode=0 Oct 01 16:33:56 crc kubenswrapper[4869]: I1001 16:33:56.410667 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2tlppj" 
event={"ID":"0025b8f5-fdce-434f-a9c6-393fd4c93273","Type":"ContainerDied","Data":"d5a9552c00f2682530122786c5a31a5cccc582cca20af1deabdbf7d2180457ba"} Oct 01 16:33:56 crc kubenswrapper[4869]: I1001 16:33:56.413067 4869 generic.go:334] "Generic (PLEG): container finished" podID="2d7aa34b-5f2f-4a92-95d8-4a4778a4c3be" containerID="1d2fda5516d94f5c864e372834c45c68275bb01419b6a3204ca58d5ea46a9f2e" exitCode=0 Oct 01 16:33:56 crc kubenswrapper[4869]: I1001 16:33:56.413097 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c6l949" event={"ID":"2d7aa34b-5f2f-4a92-95d8-4a4778a4c3be","Type":"ContainerDied","Data":"1d2fda5516d94f5c864e372834c45c68275bb01419b6a3204ca58d5ea46a9f2e"} Oct 01 16:33:57 crc kubenswrapper[4869]: I1001 16:33:57.425570 4869 generic.go:334] "Generic (PLEG): container finished" podID="0025b8f5-fdce-434f-a9c6-393fd4c93273" containerID="276c7942b321a93c32e381b928caa6dcaba5d859d50dc535f2760aeb9130e7b2" exitCode=0 Oct 01 16:33:57 crc kubenswrapper[4869]: I1001 16:33:57.425812 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2tlppj" event={"ID":"0025b8f5-fdce-434f-a9c6-393fd4c93273","Type":"ContainerDied","Data":"276c7942b321a93c32e381b928caa6dcaba5d859d50dc535f2760aeb9130e7b2"} Oct 01 16:33:57 crc kubenswrapper[4869]: I1001 16:33:57.428243 4869 generic.go:334] "Generic (PLEG): container finished" podID="2d7aa34b-5f2f-4a92-95d8-4a4778a4c3be" containerID="b292b5a06607a45ac55dcff54574e5671dde6774eb7b795e9ea60f4e87ec2348" exitCode=0 Oct 01 16:33:57 crc kubenswrapper[4869]: I1001 16:33:57.428307 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c6l949" event={"ID":"2d7aa34b-5f2f-4a92-95d8-4a4778a4c3be","Type":"ContainerDied","Data":"b292b5a06607a45ac55dcff54574e5671dde6774eb7b795e9ea60f4e87ec2348"} Oct 01 16:33:58 crc kubenswrapper[4869]: I1001 16:33:58.987949 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c6l949" Oct 01 16:33:59 crc kubenswrapper[4869]: I1001 16:33:59.002959 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2tlppj" Oct 01 16:33:59 crc kubenswrapper[4869]: I1001 16:33:59.015599 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/2d7aa34b-5f2f-4a92-95d8-4a4778a4c3be-util\") pod \"2d7aa34b-5f2f-4a92-95d8-4a4778a4c3be\" (UID: \"2d7aa34b-5f2f-4a92-95d8-4a4778a4c3be\") " Oct 01 16:33:59 crc kubenswrapper[4869]: I1001 16:33:59.015695 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jmfl5\" (UniqueName: \"kubernetes.io/projected/2d7aa34b-5f2f-4a92-95d8-4a4778a4c3be-kube-api-access-jmfl5\") pod \"2d7aa34b-5f2f-4a92-95d8-4a4778a4c3be\" (UID: \"2d7aa34b-5f2f-4a92-95d8-4a4778a4c3be\") " Oct 01 16:33:59 crc kubenswrapper[4869]: I1001 16:33:59.015878 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/2d7aa34b-5f2f-4a92-95d8-4a4778a4c3be-bundle\") pod \"2d7aa34b-5f2f-4a92-95d8-4a4778a4c3be\" (UID: \"2d7aa34b-5f2f-4a92-95d8-4a4778a4c3be\") " Oct 01 16:33:59 crc kubenswrapper[4869]: I1001 16:33:59.017348 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2d7aa34b-5f2f-4a92-95d8-4a4778a4c3be-bundle" (OuterVolumeSpecName: "bundle") pod "2d7aa34b-5f2f-4a92-95d8-4a4778a4c3be" (UID: "2d7aa34b-5f2f-4a92-95d8-4a4778a4c3be"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 16:33:59 crc kubenswrapper[4869]: I1001 16:33:59.029110 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2d7aa34b-5f2f-4a92-95d8-4a4778a4c3be-kube-api-access-jmfl5" (OuterVolumeSpecName: "kube-api-access-jmfl5") pod "2d7aa34b-5f2f-4a92-95d8-4a4778a4c3be" (UID: "2d7aa34b-5f2f-4a92-95d8-4a4778a4c3be"). InnerVolumeSpecName "kube-api-access-jmfl5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 16:33:59 crc kubenswrapper[4869]: I1001 16:33:59.117990 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/0025b8f5-fdce-434f-a9c6-393fd4c93273-bundle\") pod \"0025b8f5-fdce-434f-a9c6-393fd4c93273\" (UID: \"0025b8f5-fdce-434f-a9c6-393fd4c93273\") " Oct 01 16:33:59 crc kubenswrapper[4869]: I1001 16:33:59.118143 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/0025b8f5-fdce-434f-a9c6-393fd4c93273-util\") pod \"0025b8f5-fdce-434f-a9c6-393fd4c93273\" (UID: \"0025b8f5-fdce-434f-a9c6-393fd4c93273\") " Oct 01 16:33:59 crc kubenswrapper[4869]: I1001 16:33:59.118398 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nvt7g\" (UniqueName: \"kubernetes.io/projected/0025b8f5-fdce-434f-a9c6-393fd4c93273-kube-api-access-nvt7g\") pod \"0025b8f5-fdce-434f-a9c6-393fd4c93273\" (UID: \"0025b8f5-fdce-434f-a9c6-393fd4c93273\") " Oct 01 16:33:59 crc kubenswrapper[4869]: I1001 16:33:59.118946 4869 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/2d7aa34b-5f2f-4a92-95d8-4a4778a4c3be-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 16:33:59 crc kubenswrapper[4869]: I1001 16:33:59.118970 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jmfl5\" (UniqueName: \"kubernetes.io/projected/2d7aa34b-5f2f-4a92-95d8-4a4778a4c3be-kube-api-access-jmfl5\") on node \"crc\" DevicePath \"\"" Oct 01 16:33:59 crc kubenswrapper[4869]: I1001 16:33:59.119420 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0025b8f5-fdce-434f-a9c6-393fd4c93273-bundle" (OuterVolumeSpecName: "bundle") pod "0025b8f5-fdce-434f-a9c6-393fd4c93273" (UID: "0025b8f5-fdce-434f-a9c6-393fd4c93273"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 16:33:59 crc kubenswrapper[4869]: I1001 16:33:59.126572 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0025b8f5-fdce-434f-a9c6-393fd4c93273-kube-api-access-nvt7g" (OuterVolumeSpecName: "kube-api-access-nvt7g") pod "0025b8f5-fdce-434f-a9c6-393fd4c93273" (UID: "0025b8f5-fdce-434f-a9c6-393fd4c93273"). InnerVolumeSpecName "kube-api-access-nvt7g". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 16:33:59 crc kubenswrapper[4869]: I1001 16:33:59.144247 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0025b8f5-fdce-434f-a9c6-393fd4c93273-util" (OuterVolumeSpecName: "util") pod "0025b8f5-fdce-434f-a9c6-393fd4c93273" (UID: "0025b8f5-fdce-434f-a9c6-393fd4c93273"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 16:33:59 crc kubenswrapper[4869]: I1001 16:33:59.184368 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2d7aa34b-5f2f-4a92-95d8-4a4778a4c3be-util" (OuterVolumeSpecName: "util") pod "2d7aa34b-5f2f-4a92-95d8-4a4778a4c3be" (UID: "2d7aa34b-5f2f-4a92-95d8-4a4778a4c3be"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 16:33:59 crc kubenswrapper[4869]: I1001 16:33:59.220656 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nvt7g\" (UniqueName: \"kubernetes.io/projected/0025b8f5-fdce-434f-a9c6-393fd4c93273-kube-api-access-nvt7g\") on node \"crc\" DevicePath \"\"" Oct 01 16:33:59 crc kubenswrapper[4869]: I1001 16:33:59.220697 4869 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/0025b8f5-fdce-434f-a9c6-393fd4c93273-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 16:33:59 crc kubenswrapper[4869]: I1001 16:33:59.220715 4869 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/0025b8f5-fdce-434f-a9c6-393fd4c93273-util\") on node \"crc\" DevicePath \"\"" Oct 01 16:33:59 crc kubenswrapper[4869]: I1001 16:33:59.220727 4869 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/2d7aa34b-5f2f-4a92-95d8-4a4778a4c3be-util\") on node \"crc\" DevicePath \"\"" Oct 01 16:33:59 crc kubenswrapper[4869]: I1001 16:33:59.451340 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2tlppj" event={"ID":"0025b8f5-fdce-434f-a9c6-393fd4c93273","Type":"ContainerDied","Data":"f8776f403713331d9ca2d88d7eb8a717ddddae7054df7f2c71f65b4d97b9dad7"} Oct 01 16:33:59 crc kubenswrapper[4869]: I1001 16:33:59.451420 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f8776f403713331d9ca2d88d7eb8a717ddddae7054df7f2c71f65b4d97b9dad7" Oct 01 16:33:59 crc kubenswrapper[4869]: I1001 16:33:59.451366 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2tlppj" Oct 01 16:33:59 crc kubenswrapper[4869]: I1001 16:33:59.454338 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c6l949" event={"ID":"2d7aa34b-5f2f-4a92-95d8-4a4778a4c3be","Type":"ContainerDied","Data":"54967728b9c09fef33403868e0d7d5cda910fc707521971ea02fb8996eb5c344"} Oct 01 16:33:59 crc kubenswrapper[4869]: I1001 16:33:59.454401 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="54967728b9c09fef33403868e0d7d5cda910fc707521971ea02fb8996eb5c344" Oct 01 16:33:59 crc kubenswrapper[4869]: I1001 16:33:59.454440 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c6l949" Oct 01 16:34:04 crc kubenswrapper[4869]: I1001 16:34:04.258522 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-858ddd8f98-m2cf8"] Oct 01 16:34:04 crc kubenswrapper[4869]: E1001 16:34:04.259552 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d7aa34b-5f2f-4a92-95d8-4a4778a4c3be" containerName="extract" Oct 01 16:34:04 crc kubenswrapper[4869]: I1001 16:34:04.259656 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d7aa34b-5f2f-4a92-95d8-4a4778a4c3be" containerName="extract" Oct 01 16:34:04 crc kubenswrapper[4869]: E1001 16:34:04.259672 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0025b8f5-fdce-434f-a9c6-393fd4c93273" containerName="extract" Oct 01 16:34:04 crc kubenswrapper[4869]: I1001 16:34:04.259681 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="0025b8f5-fdce-434f-a9c6-393fd4c93273" containerName="extract" Oct 01 16:34:04 crc kubenswrapper[4869]: E1001 16:34:04.259697 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d7aa34b-5f2f-4a92-95d8-4a4778a4c3be" containerName="util" Oct 01 16:34:04 crc kubenswrapper[4869]: I1001 16:34:04.259705 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d7aa34b-5f2f-4a92-95d8-4a4778a4c3be" containerName="util" Oct 01 16:34:04 crc kubenswrapper[4869]: E1001 16:34:04.259718 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0025b8f5-fdce-434f-a9c6-393fd4c93273" containerName="pull" Oct 01 16:34:04 crc kubenswrapper[4869]: I1001 16:34:04.259726 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="0025b8f5-fdce-434f-a9c6-393fd4c93273" containerName="pull" Oct 01 16:34:04 crc kubenswrapper[4869]: E1001 16:34:04.259762 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d7aa34b-5f2f-4a92-95d8-4a4778a4c3be" containerName="pull" Oct 01 16:34:04 crc kubenswrapper[4869]: I1001 16:34:04.259770 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d7aa34b-5f2f-4a92-95d8-4a4778a4c3be" containerName="pull" Oct 01 16:34:04 crc kubenswrapper[4869]: E1001 16:34:04.259785 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0025b8f5-fdce-434f-a9c6-393fd4c93273" containerName="util" Oct 01 16:34:04 crc kubenswrapper[4869]: I1001 16:34:04.259792 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="0025b8f5-fdce-434f-a9c6-393fd4c93273" containerName="util" Oct 01 16:34:04 crc kubenswrapper[4869]: I1001 16:34:04.260022 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="0025b8f5-fdce-434f-a9c6-393fd4c93273" containerName="extract" Oct 01 16:34:04 crc kubenswrapper[4869]: I1001 16:34:04.260041 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="2d7aa34b-5f2f-4a92-95d8-4a4778a4c3be" containerName="extract" Oct 01 16:34:04 crc kubenswrapper[4869]: I1001 16:34:04.261082 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-858ddd8f98-m2cf8" Oct 01 16:34:04 crc kubenswrapper[4869]: I1001 16:34:04.273314 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-858ddd8f98-m2cf8"] Oct 01 16:34:04 crc kubenswrapper[4869]: I1001 16:34:04.331083 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g66p8\" (UniqueName: \"kubernetes.io/projected/f9bf39e5-d129-4dc7-881e-3a312469e6f9-kube-api-access-g66p8\") pod \"nmstate-operator-858ddd8f98-m2cf8\" (UID: \"f9bf39e5-d129-4dc7-881e-3a312469e6f9\") " pod="openshift-nmstate/nmstate-operator-858ddd8f98-m2cf8" Oct 01 16:34:04 crc kubenswrapper[4869]: I1001 16:34:04.432488 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g66p8\" (UniqueName: \"kubernetes.io/projected/f9bf39e5-d129-4dc7-881e-3a312469e6f9-kube-api-access-g66p8\") pod \"nmstate-operator-858ddd8f98-m2cf8\" (UID: \"f9bf39e5-d129-4dc7-881e-3a312469e6f9\") " pod="openshift-nmstate/nmstate-operator-858ddd8f98-m2cf8" Oct 01 16:34:04 crc kubenswrapper[4869]: I1001 16:34:04.461441 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g66p8\" (UniqueName: \"kubernetes.io/projected/f9bf39e5-d129-4dc7-881e-3a312469e6f9-kube-api-access-g66p8\") pod \"nmstate-operator-858ddd8f98-m2cf8\" (UID: \"f9bf39e5-d129-4dc7-881e-3a312469e6f9\") " pod="openshift-nmstate/nmstate-operator-858ddd8f98-m2cf8" Oct 01 16:34:04 crc kubenswrapper[4869]: I1001 16:34:04.583430 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-operator-858ddd8f98-m2cf8" Oct 01 16:34:05 crc kubenswrapper[4869]: I1001 16:34:05.099581 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-858ddd8f98-m2cf8"] Oct 01 16:34:05 crc kubenswrapper[4869]: I1001 16:34:05.528910 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-858ddd8f98-m2cf8" event={"ID":"f9bf39e5-d129-4dc7-881e-3a312469e6f9","Type":"ContainerStarted","Data":"3a494f5adc0f787229bf812f9cd7b436ecf2dc099a1f62aeb4f9044fc6663192"} Oct 01 16:34:08 crc kubenswrapper[4869]: I1001 16:34:08.497118 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-nmstate/nmstate-metrics-58fcddf996-bc5gk"] Oct 01 16:34:08 crc kubenswrapper[4869]: I1001 16:34:08.497957 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-nmstate/nmstate-metrics-58fcddf996-bc5gk" podUID="27f0e6e2-94a5-4313-9c9e-9eacbc971748" containerName="nmstate-metrics" containerID="cri-o://c1f9913f8a4df05b50a07411e0e87e257fa37dc49c286ff0c860862ee21fc194" gracePeriod=30 Oct 01 16:34:08 crc kubenswrapper[4869]: I1001 16:34:08.498014 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-nmstate/nmstate-metrics-58fcddf996-bc5gk" podUID="27f0e6e2-94a5-4313-9c9e-9eacbc971748" containerName="kube-rbac-proxy" containerID="cri-o://477d6470d69b9431c4f75da395985b54d2bbb544f32068e427a48f701dc3c101" gracePeriod=30 Oct 01 16:34:08 crc kubenswrapper[4869]: I1001 16:34:08.520292 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-nmstate/nmstate-webhook-6d689559c5-dssrt"] Oct 01 16:34:08 crc kubenswrapper[4869]: I1001 16:34:08.520547 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-nmstate/nmstate-webhook-6d689559c5-dssrt" 
podUID="aa4da6ef-158a-44e5-8d1a-779aa19fe3ac" containerName="nmstate-webhook" containerID="cri-o://b2825444eff4b4c6d99387f6454d228630e32645a13994d2d0cec3b378353f1c" gracePeriod=30 Oct 01 16:34:08 crc kubenswrapper[4869]: I1001 16:34:08.530106 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-nmstate/nmstate-handler-xs8c4"] Oct 01 16:34:08 crc kubenswrapper[4869]: I1001 16:34:08.530339 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-nmstate/nmstate-handler-xs8c4" podUID="441818c5-6e78-4a8f-9ed9-58e7dd4b2028" containerName="nmstate-handler" containerID="cri-o://986db3c34f99f3e6dac6202d395cccb5945f8bda2565f33f95a8aec20a75bca0" gracePeriod=30 Oct 01 16:34:08 crc kubenswrapper[4869]: I1001 16:34:08.554155 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-858ddd8f98-m2cf8" event={"ID":"f9bf39e5-d129-4dc7-881e-3a312469e6f9","Type":"ContainerStarted","Data":"03e575aec326e5964b579fa9a312d1d66773821561da0511b43e99e45ab938ae"} Oct 01 16:34:08 crc kubenswrapper[4869]: I1001 16:34:08.577307 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-858ddd8f98-m2cf8" podStartSLOduration=1.941144549 podStartE2EDuration="4.577285105s" podCreationTimestamp="2025-10-01 16:34:04 +0000 UTC" firstStartedPulling="2025-10-01 16:34:05.122564133 +0000 UTC m=+5354.269407249" lastFinishedPulling="2025-10-01 16:34:07.758704689 +0000 UTC m=+5356.905547805" observedRunningTime="2025-10-01 16:34:08.57391666 +0000 UTC m=+5357.720759776" watchObservedRunningTime="2025-10-01 16:34:08.577285105 +0000 UTC m=+5357.724128231" Oct 01 16:34:08 crc kubenswrapper[4869]: I1001 16:34:08.653533 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-nmstate/nmstate-operator-5d6f6cfd66-5whxj"] Oct 01 16:34:08 crc kubenswrapper[4869]: I1001 16:34:08.653759 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-5whxj" podUID="bd4b603c-ef21-4afa-adcf-0e075976eeef" containerName="nmstate-operator" containerID="cri-o://ae61def0d9c94972a6fd926fffa33a9a7ec9dc0aad61f31c8348d2b23c698eb6" gracePeriod=30 Oct 01 16:34:08 crc kubenswrapper[4869]: I1001 16:34:08.932141 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-6b874cbd85-2mvc5"] Oct 01 16:34:08 crc kubenswrapper[4869]: I1001 16:34:08.934989 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-2mvc5" Oct 01 16:34:08 crc kubenswrapper[4869]: I1001 16:34:08.948132 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-6b874cbd85-2mvc5"] Oct 01 16:34:08 crc kubenswrapper[4869]: I1001 16:34:08.997632 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-handler-xs8c4" Oct 01 16:34:09 crc kubenswrapper[4869]: I1001 16:34:09.024548 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4kgd6\" (UniqueName: \"kubernetes.io/projected/441818c5-6e78-4a8f-9ed9-58e7dd4b2028-kube-api-access-4kgd6\") pod \"441818c5-6e78-4a8f-9ed9-58e7dd4b2028\" (UID: \"441818c5-6e78-4a8f-9ed9-58e7dd4b2028\") " Oct 01 16:34:09 crc kubenswrapper[4869]: I1001 16:34:09.024860 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/441818c5-6e78-4a8f-9ed9-58e7dd4b2028-nmstate-lock\") pod \"441818c5-6e78-4a8f-9ed9-58e7dd4b2028\" (UID: \"441818c5-6e78-4a8f-9ed9-58e7dd4b2028\") " Oct 01 16:34:09 crc kubenswrapper[4869]: I1001 16:34:09.025021 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/441818c5-6e78-4a8f-9ed9-58e7dd4b2028-dbus-socket\") pod \"441818c5-6e78-4a8f-9ed9-58e7dd4b2028\" (UID: \"441818c5-6e78-4a8f-9ed9-58e7dd4b2028\") " Oct 01 16:34:09 crc kubenswrapper[4869]: I1001 16:34:09.025199 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/441818c5-6e78-4a8f-9ed9-58e7dd4b2028-ovs-socket\") pod \"441818c5-6e78-4a8f-9ed9-58e7dd4b2028\" (UID: \"441818c5-6e78-4a8f-9ed9-58e7dd4b2028\") " Oct 01 16:34:09 crc kubenswrapper[4869]: I1001 16:34:09.024938 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/441818c5-6e78-4a8f-9ed9-58e7dd4b2028-nmstate-lock" (OuterVolumeSpecName: "nmstate-lock") pod "441818c5-6e78-4a8f-9ed9-58e7dd4b2028" (UID: "441818c5-6e78-4a8f-9ed9-58e7dd4b2028"). InnerVolumeSpecName "nmstate-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 16:34:09 crc kubenswrapper[4869]: I1001 16:34:09.025344 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/441818c5-6e78-4a8f-9ed9-58e7dd4b2028-ovs-socket" (OuterVolumeSpecName: "ovs-socket") pod "441818c5-6e78-4a8f-9ed9-58e7dd4b2028" (UID: "441818c5-6e78-4a8f-9ed9-58e7dd4b2028"). InnerVolumeSpecName "ovs-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 16:34:09 crc kubenswrapper[4869]: I1001 16:34:09.025410 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/441818c5-6e78-4a8f-9ed9-58e7dd4b2028-dbus-socket" (OuterVolumeSpecName: "dbus-socket") pod "441818c5-6e78-4a8f-9ed9-58e7dd4b2028" (UID: "441818c5-6e78-4a8f-9ed9-58e7dd4b2028"). InnerVolumeSpecName "dbus-socket". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 16:34:09 crc kubenswrapper[4869]: I1001 16:34:09.025808 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-69rxt\" (UniqueName: \"kubernetes.io/projected/a8204861-b466-42de-bda3-448b67dc02f2-kube-api-access-69rxt\") pod \"nmstate-console-plugin-6b874cbd85-2mvc5\" (UID: \"a8204861-b466-42de-bda3-448b67dc02f2\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-2mvc5" Oct 01 16:34:09 crc kubenswrapper[4869]: I1001 16:34:09.025952 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/a8204861-b466-42de-bda3-448b67dc02f2-nginx-conf\") pod \"nmstate-console-plugin-6b874cbd85-2mvc5\" (UID: \"a8204861-b466-42de-bda3-448b67dc02f2\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-2mvc5" Oct 01 16:34:09 crc kubenswrapper[4869]: I1001 16:34:09.026384 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/a8204861-b466-42de-bda3-448b67dc02f2-plugin-serving-cert\") pod \"nmstate-console-plugin-6b874cbd85-2mvc5\" (UID: \"a8204861-b466-42de-bda3-448b67dc02f2\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-2mvc5" Oct 01 16:34:09 crc kubenswrapper[4869]: I1001 16:34:09.026600 4869 reconciler_common.go:293] "Volume detached for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/441818c5-6e78-4a8f-9ed9-58e7dd4b2028-nmstate-lock\") on node \"crc\" DevicePath \"\"" Oct 01 16:34:09 crc kubenswrapper[4869]: I1001 16:34:09.026701 4869 reconciler_common.go:293] "Volume detached for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/441818c5-6e78-4a8f-9ed9-58e7dd4b2028-dbus-socket\") on node \"crc\" DevicePath \"\"" Oct 01 16:34:09 crc kubenswrapper[4869]: I1001 16:34:09.026797 4869 reconciler_common.go:293] "Volume detached for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/441818c5-6e78-4a8f-9ed9-58e7dd4b2028-ovs-socket\") on node \"crc\" DevicePath \"\"" Oct 01 16:34:09 crc kubenswrapper[4869]: I1001 16:34:09.036192 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/441818c5-6e78-4a8f-9ed9-58e7dd4b2028-kube-api-access-4kgd6" (OuterVolumeSpecName: "kube-api-access-4kgd6") pod "441818c5-6e78-4a8f-9ed9-58e7dd4b2028" (UID: "441818c5-6e78-4a8f-9ed9-58e7dd4b2028"). InnerVolumeSpecName "kube-api-access-4kgd6". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 16:34:09 crc kubenswrapper[4869]: I1001 16:34:09.056309 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-p56ll"] Oct 01 16:34:09 crc kubenswrapper[4869]: E1001 16:34:09.056980 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="441818c5-6e78-4a8f-9ed9-58e7dd4b2028" containerName="nmstate-handler" Oct 01 16:34:09 crc kubenswrapper[4869]: I1001 16:34:09.057005 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="441818c5-6e78-4a8f-9ed9-58e7dd4b2028" containerName="nmstate-handler" Oct 01 16:34:09 crc kubenswrapper[4869]: I1001 16:34:09.057342 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="441818c5-6e78-4a8f-9ed9-58e7dd4b2028" containerName="nmstate-handler" Oct 01 16:34:09 crc kubenswrapper[4869]: I1001 16:34:09.058163 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-handler-p56ll" Oct 01 16:34:09 crc kubenswrapper[4869]: I1001 16:34:09.136192 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/fb760180-2040-4f8f-8a57-e8f2fdb6d1ed-nmstate-lock\") pod \"nmstate-handler-p56ll\" (UID: \"fb760180-2040-4f8f-8a57-e8f2fdb6d1ed\") " pod="openshift-nmstate/nmstate-handler-p56ll" Oct 01 16:34:09 crc kubenswrapper[4869]: I1001 16:34:09.137631 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/fb760180-2040-4f8f-8a57-e8f2fdb6d1ed-dbus-socket\") pod \"nmstate-handler-p56ll\" (UID: \"fb760180-2040-4f8f-8a57-e8f2fdb6d1ed\") " pod="openshift-nmstate/nmstate-handler-p56ll" Oct 01 16:34:09 crc kubenswrapper[4869]: I1001 16:34:09.143354 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vm4m9\" (UniqueName: \"kubernetes.io/projected/fb760180-2040-4f8f-8a57-e8f2fdb6d1ed-kube-api-access-vm4m9\") pod \"nmstate-handler-p56ll\" (UID: \"fb760180-2040-4f8f-8a57-e8f2fdb6d1ed\") " pod="openshift-nmstate/nmstate-handler-p56ll" Oct 01 16:34:09 crc kubenswrapper[4869]: I1001 16:34:09.155641 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/a8204861-b466-42de-bda3-448b67dc02f2-plugin-serving-cert\") pod \"nmstate-console-plugin-6b874cbd85-2mvc5\" (UID: \"a8204861-b466-42de-bda3-448b67dc02f2\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-2mvc5" Oct 01 16:34:09 crc kubenswrapper[4869]: I1001 16:34:09.155713 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/fb760180-2040-4f8f-8a57-e8f2fdb6d1ed-ovs-socket\") pod \"nmstate-handler-p56ll\" (UID: \"fb760180-2040-4f8f-8a57-e8f2fdb6d1ed\") " pod="openshift-nmstate/nmstate-handler-p56ll" Oct 01 16:34:09 crc kubenswrapper[4869]: I1001 16:34:09.155778 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-69rxt\" (UniqueName: \"kubernetes.io/projected/a8204861-b466-42de-bda3-448b67dc02f2-kube-api-access-69rxt\") pod \"nmstate-console-plugin-6b874cbd85-2mvc5\" (UID: \"a8204861-b466-42de-bda3-448b67dc02f2\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-2mvc5" Oct 01 16:34:09 crc kubenswrapper[4869]: I1001 16:34:09.155815 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/a8204861-b466-42de-bda3-448b67dc02f2-nginx-conf\") pod \"nmstate-console-plugin-6b874cbd85-2mvc5\" (UID: \"a8204861-b466-42de-bda3-448b67dc02f2\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-2mvc5" Oct 01 16:34:09 crc kubenswrapper[4869]: I1001 16:34:09.155976 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4kgd6\" (UniqueName: \"kubernetes.io/projected/441818c5-6e78-4a8f-9ed9-58e7dd4b2028-kube-api-access-4kgd6\") on node \"crc\" DevicePath \"\"" Oct 01 16:34:09 crc kubenswrapper[4869]: I1001 16:34:09.156917 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/a8204861-b466-42de-bda3-448b67dc02f2-nginx-conf\") pod \"nmstate-console-plugin-6b874cbd85-2mvc5\" (UID: 
\"a8204861-b466-42de-bda3-448b67dc02f2\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-2mvc5" Oct 01 16:34:09 crc kubenswrapper[4869]: I1001 16:34:09.174602 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-69rxt\" (UniqueName: \"kubernetes.io/projected/a8204861-b466-42de-bda3-448b67dc02f2-kube-api-access-69rxt\") pod \"nmstate-console-plugin-6b874cbd85-2mvc5\" (UID: \"a8204861-b466-42de-bda3-448b67dc02f2\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-2mvc5" Oct 01 16:34:09 crc kubenswrapper[4869]: I1001 16:34:09.178967 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/a8204861-b466-42de-bda3-448b67dc02f2-plugin-serving-cert\") pod \"nmstate-console-plugin-6b874cbd85-2mvc5\" (UID: \"a8204861-b466-42de-bda3-448b67dc02f2\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-2mvc5" Oct 01 16:34:09 crc kubenswrapper[4869]: I1001 16:34:09.258251 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/fb760180-2040-4f8f-8a57-e8f2fdb6d1ed-ovs-socket\") pod \"nmstate-handler-p56ll\" (UID: \"fb760180-2040-4f8f-8a57-e8f2fdb6d1ed\") " pod="openshift-nmstate/nmstate-handler-p56ll" Oct 01 16:34:09 crc kubenswrapper[4869]: I1001 16:34:09.258375 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/fb760180-2040-4f8f-8a57-e8f2fdb6d1ed-nmstate-lock\") pod \"nmstate-handler-p56ll\" (UID: \"fb760180-2040-4f8f-8a57-e8f2fdb6d1ed\") " pod="openshift-nmstate/nmstate-handler-p56ll" Oct 01 16:34:09 crc kubenswrapper[4869]: I1001 16:34:09.258408 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/fb760180-2040-4f8f-8a57-e8f2fdb6d1ed-dbus-socket\") pod \"nmstate-handler-p56ll\" (UID: \"fb760180-2040-4f8f-8a57-e8f2fdb6d1ed\") " pod="openshift-nmstate/nmstate-handler-p56ll" Oct 01 16:34:09 crc kubenswrapper[4869]: I1001 16:34:09.258426 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vm4m9\" (UniqueName: \"kubernetes.io/projected/fb760180-2040-4f8f-8a57-e8f2fdb6d1ed-kube-api-access-vm4m9\") pod \"nmstate-handler-p56ll\" (UID: \"fb760180-2040-4f8f-8a57-e8f2fdb6d1ed\") " pod="openshift-nmstate/nmstate-handler-p56ll" Oct 01 16:34:09 crc kubenswrapper[4869]: I1001 16:34:09.258769 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/fb760180-2040-4f8f-8a57-e8f2fdb6d1ed-ovs-socket\") pod \"nmstate-handler-p56ll\" (UID: \"fb760180-2040-4f8f-8a57-e8f2fdb6d1ed\") " pod="openshift-nmstate/nmstate-handler-p56ll" Oct 01 16:34:09 crc kubenswrapper[4869]: I1001 16:34:09.258803 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/fb760180-2040-4f8f-8a57-e8f2fdb6d1ed-nmstate-lock\") pod \"nmstate-handler-p56ll\" (UID: \"fb760180-2040-4f8f-8a57-e8f2fdb6d1ed\") " pod="openshift-nmstate/nmstate-handler-p56ll" Oct 01 16:34:09 crc kubenswrapper[4869]: I1001 16:34:09.258985 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/fb760180-2040-4f8f-8a57-e8f2fdb6d1ed-dbus-socket\") pod \"nmstate-handler-p56ll\" (UID: \"fb760180-2040-4f8f-8a57-e8f2fdb6d1ed\") " 
pod="openshift-nmstate/nmstate-handler-p56ll" Oct 01 16:34:09 crc kubenswrapper[4869]: I1001 16:34:09.286368 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vm4m9\" (UniqueName: \"kubernetes.io/projected/fb760180-2040-4f8f-8a57-e8f2fdb6d1ed-kube-api-access-vm4m9\") pod \"nmstate-handler-p56ll\" (UID: \"fb760180-2040-4f8f-8a57-e8f2fdb6d1ed\") " pod="openshift-nmstate/nmstate-handler-p56ll" Oct 01 16:34:09 crc kubenswrapper[4869]: I1001 16:34:09.312762 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-2mvc5" Oct 01 16:34:09 crc kubenswrapper[4869]: I1001 16:34:09.454844 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-6d689559c5-dssrt" Oct 01 16:34:09 crc kubenswrapper[4869]: I1001 16:34:09.461883 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-p56ll" Oct 01 16:34:09 crc kubenswrapper[4869]: I1001 16:34:09.549735 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-6cdbc54649-k6lhz"] Oct 01 16:34:09 crc kubenswrapper[4869]: E1001 16:34:09.550374 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa4da6ef-158a-44e5-8d1a-779aa19fe3ac" containerName="nmstate-webhook" Oct 01 16:34:09 crc kubenswrapper[4869]: I1001 16:34:09.550386 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="aa4da6ef-158a-44e5-8d1a-779aa19fe3ac" containerName="nmstate-webhook" Oct 01 16:34:09 crc kubenswrapper[4869]: I1001 16:34:09.550604 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="aa4da6ef-158a-44e5-8d1a-779aa19fe3ac" containerName="nmstate-webhook" Oct 01 16:34:09 crc kubenswrapper[4869]: I1001 16:34:09.551246 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-k6lhz" Oct 01 16:34:09 crc kubenswrapper[4869]: I1001 16:34:09.564896 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-6cdbc54649-k6lhz"] Oct 01 16:34:09 crc kubenswrapper[4869]: I1001 16:34:09.569707 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/aa4da6ef-158a-44e5-8d1a-779aa19fe3ac-tls-key-pair\") pod \"aa4da6ef-158a-44e5-8d1a-779aa19fe3ac\" (UID: \"aa4da6ef-158a-44e5-8d1a-779aa19fe3ac\") " Oct 01 16:34:09 crc kubenswrapper[4869]: I1001 16:34:09.569892 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5rn77\" (UniqueName: \"kubernetes.io/projected/aa4da6ef-158a-44e5-8d1a-779aa19fe3ac-kube-api-access-5rn77\") pod \"aa4da6ef-158a-44e5-8d1a-779aa19fe3ac\" (UID: \"aa4da6ef-158a-44e5-8d1a-779aa19fe3ac\") " Oct 01 16:34:09 crc kubenswrapper[4869]: I1001 16:34:09.576583 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa4da6ef-158a-44e5-8d1a-779aa19fe3ac-tls-key-pair" (OuterVolumeSpecName: "tls-key-pair") pod "aa4da6ef-158a-44e5-8d1a-779aa19fe3ac" (UID: "aa4da6ef-158a-44e5-8d1a-779aa19fe3ac"). InnerVolumeSpecName "tls-key-pair". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 16:34:09 crc kubenswrapper[4869]: I1001 16:34:09.577843 4869 generic.go:334] "Generic (PLEG): container finished" podID="bd4b603c-ef21-4afa-adcf-0e075976eeef" containerID="ae61def0d9c94972a6fd926fffa33a9a7ec9dc0aad61f31c8348d2b23c698eb6" exitCode=0 Oct 01 16:34:09 crc kubenswrapper[4869]: I1001 16:34:09.577923 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-5whxj" event={"ID":"bd4b603c-ef21-4afa-adcf-0e075976eeef","Type":"ContainerDied","Data":"ae61def0d9c94972a6fd926fffa33a9a7ec9dc0aad61f31c8348d2b23c698eb6"} Oct 01 16:34:09 crc kubenswrapper[4869]: I1001 16:34:09.578122 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aa4da6ef-158a-44e5-8d1a-779aa19fe3ac-kube-api-access-5rn77" (OuterVolumeSpecName: "kube-api-access-5rn77") pod "aa4da6ef-158a-44e5-8d1a-779aa19fe3ac" (UID: "aa4da6ef-158a-44e5-8d1a-779aa19fe3ac"). InnerVolumeSpecName "kube-api-access-5rn77". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 16:34:09 crc kubenswrapper[4869]: I1001 16:34:09.600767 4869 generic.go:334] "Generic (PLEG): container finished" podID="27f0e6e2-94a5-4313-9c9e-9eacbc971748" containerID="477d6470d69b9431c4f75da395985b54d2bbb544f32068e427a48f701dc3c101" exitCode=0 Oct 01 16:34:09 crc kubenswrapper[4869]: I1001 16:34:09.600790 4869 generic.go:334] "Generic (PLEG): container finished" podID="27f0e6e2-94a5-4313-9c9e-9eacbc971748" containerID="c1f9913f8a4df05b50a07411e0e87e257fa37dc49c286ff0c860862ee21fc194" exitCode=0 Oct 01 16:34:09 crc kubenswrapper[4869]: I1001 16:34:09.627960 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-58fcddf996-bc5gk" event={"ID":"27f0e6e2-94a5-4313-9c9e-9eacbc971748","Type":"ContainerDied","Data":"477d6470d69b9431c4f75da395985b54d2bbb544f32068e427a48f701dc3c101"} Oct 01 16:34:09 crc kubenswrapper[4869]: I1001 16:34:09.628011 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-58fcddf996-bc5gk" event={"ID":"27f0e6e2-94a5-4313-9c9e-9eacbc971748","Type":"ContainerDied","Data":"c1f9913f8a4df05b50a07411e0e87e257fa37dc49c286ff0c860862ee21fc194"} Oct 01 16:34:09 crc kubenswrapper[4869]: I1001 16:34:09.629178 4869 generic.go:334] "Generic (PLEG): container finished" podID="441818c5-6e78-4a8f-9ed9-58e7dd4b2028" containerID="986db3c34f99f3e6dac6202d395cccb5945f8bda2565f33f95a8aec20a75bca0" exitCode=0 Oct 01 16:34:09 crc kubenswrapper[4869]: I1001 16:34:09.629275 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-handler-xs8c4" Oct 01 16:34:09 crc kubenswrapper[4869]: I1001 16:34:09.629299 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-xs8c4" event={"ID":"441818c5-6e78-4a8f-9ed9-58e7dd4b2028","Type":"ContainerDied","Data":"986db3c34f99f3e6dac6202d395cccb5945f8bda2565f33f95a8aec20a75bca0"} Oct 01 16:34:09 crc kubenswrapper[4869]: I1001 16:34:09.629540 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-xs8c4" event={"ID":"441818c5-6e78-4a8f-9ed9-58e7dd4b2028","Type":"ContainerDied","Data":"1b4f1160a493344f9201f412caca5c75b512b48d88702dbe99ffe5f13ea8d727"} Oct 01 16:34:09 crc kubenswrapper[4869]: I1001 16:34:09.629562 4869 scope.go:117] "RemoveContainer" containerID="986db3c34f99f3e6dac6202d395cccb5945f8bda2565f33f95a8aec20a75bca0" Oct 01 16:34:09 crc kubenswrapper[4869]: I1001 16:34:09.635405 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-58fcddf996-bc5gk" Oct 01 16:34:09 crc kubenswrapper[4869]: I1001 16:34:09.645794 4869 generic.go:334] "Generic (PLEG): container finished" podID="aa4da6ef-158a-44e5-8d1a-779aa19fe3ac" containerID="b2825444eff4b4c6d99387f6454d228630e32645a13994d2d0cec3b378353f1c" exitCode=0 Oct 01 16:34:09 crc kubenswrapper[4869]: I1001 16:34:09.648303 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-6d689559c5-dssrt" Oct 01 16:34:09 crc kubenswrapper[4869]: I1001 16:34:09.649562 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-6d689559c5-dssrt" event={"ID":"aa4da6ef-158a-44e5-8d1a-779aa19fe3ac","Type":"ContainerDied","Data":"b2825444eff4b4c6d99387f6454d228630e32645a13994d2d0cec3b378353f1c"} Oct 01 16:34:09 crc kubenswrapper[4869]: I1001 16:34:09.649649 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-6d689559c5-dssrt" event={"ID":"aa4da6ef-158a-44e5-8d1a-779aa19fe3ac","Type":"ContainerDied","Data":"6223dadd0a29bb55bca96fae860cdce6e3c012da4bd842b0dcebfad503b7383c"} Oct 01 16:34:09 crc kubenswrapper[4869]: I1001 16:34:09.654717 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-5whxj" Oct 01 16:34:09 crc kubenswrapper[4869]: I1001 16:34:09.674610 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/17e9486a-5271-47f0-9851-30b1c293f6c7-tls-key-pair\") pod \"nmstate-webhook-6cdbc54649-k6lhz\" (UID: \"17e9486a-5271-47f0-9851-30b1c293f6c7\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-k6lhz" Oct 01 16:34:09 crc kubenswrapper[4869]: I1001 16:34:09.674729 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7c2r6\" (UniqueName: \"kubernetes.io/projected/17e9486a-5271-47f0-9851-30b1c293f6c7-kube-api-access-7c2r6\") pod \"nmstate-webhook-6cdbc54649-k6lhz\" (UID: \"17e9486a-5271-47f0-9851-30b1c293f6c7\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-k6lhz" Oct 01 16:34:09 crc kubenswrapper[4869]: I1001 16:34:09.675017 4869 reconciler_common.go:293] "Volume detached for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/aa4da6ef-158a-44e5-8d1a-779aa19fe3ac-tls-key-pair\") on node \"crc\" DevicePath \"\"" Oct 01 16:34:09 crc kubenswrapper[4869]: I1001 16:34:09.675038 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5rn77\" (UniqueName: \"kubernetes.io/projected/aa4da6ef-158a-44e5-8d1a-779aa19fe3ac-kube-api-access-5rn77\") on node \"crc\" DevicePath \"\"" Oct 01 16:34:09 crc kubenswrapper[4869]: I1001 16:34:09.725687 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-nmstate/nmstate-webhook-6d689559c5-dssrt"] Oct 01 16:34:09 crc kubenswrapper[4869]: I1001 16:34:09.736279 4869 scope.go:117] "RemoveContainer" containerID="986db3c34f99f3e6dac6202d395cccb5945f8bda2565f33f95a8aec20a75bca0" Oct 01 16:34:09 crc kubenswrapper[4869]: E1001 16:34:09.736739 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"986db3c34f99f3e6dac6202d395cccb5945f8bda2565f33f95a8aec20a75bca0\": container with ID starting with 986db3c34f99f3e6dac6202d395cccb5945f8bda2565f33f95a8aec20a75bca0 not found: ID does not exist" containerID="986db3c34f99f3e6dac6202d395cccb5945f8bda2565f33f95a8aec20a75bca0" Oct 01 16:34:09 crc kubenswrapper[4869]: I1001 16:34:09.736771 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"986db3c34f99f3e6dac6202d395cccb5945f8bda2565f33f95a8aec20a75bca0"} err="failed to get container status \"986db3c34f99f3e6dac6202d395cccb5945f8bda2565f33f95a8aec20a75bca0\": rpc error: code = NotFound desc = could not find container \"986db3c34f99f3e6dac6202d395cccb5945f8bda2565f33f95a8aec20a75bca0\": container with ID starting with 986db3c34f99f3e6dac6202d395cccb5945f8bda2565f33f95a8aec20a75bca0 not found: ID does not exist" Oct 01 16:34:09 crc kubenswrapper[4869]: I1001 16:34:09.736794 4869 scope.go:117] "RemoveContainer" containerID="b2825444eff4b4c6d99387f6454d228630e32645a13994d2d0cec3b378353f1c" Oct 01 16:34:09 crc kubenswrapper[4869]: I1001 16:34:09.736861 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-nmstate/nmstate-webhook-6d689559c5-dssrt"] Oct 01 16:34:09 crc kubenswrapper[4869]: I1001 16:34:09.752221 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-nmstate/nmstate-handler-xs8c4"] Oct 01 16:34:09 crc kubenswrapper[4869]: I1001 16:34:09.760789 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openshift-nmstate/nmstate-handler-xs8c4"] Oct 01 16:34:09 crc kubenswrapper[4869]: I1001 16:34:09.774525 4869 scope.go:117] "RemoveContainer" containerID="b2825444eff4b4c6d99387f6454d228630e32645a13994d2d0cec3b378353f1c" Oct 01 16:34:09 crc kubenswrapper[4869]: E1001 16:34:09.775674 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b2825444eff4b4c6d99387f6454d228630e32645a13994d2d0cec3b378353f1c\": container with ID starting with b2825444eff4b4c6d99387f6454d228630e32645a13994d2d0cec3b378353f1c not found: ID does not exist" containerID="b2825444eff4b4c6d99387f6454d228630e32645a13994d2d0cec3b378353f1c" Oct 01 16:34:09 crc kubenswrapper[4869]: I1001 16:34:09.775721 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b2825444eff4b4c6d99387f6454d228630e32645a13994d2d0cec3b378353f1c"} err="failed to get container status \"b2825444eff4b4c6d99387f6454d228630e32645a13994d2d0cec3b378353f1c\": rpc error: code = NotFound desc = could not find container \"b2825444eff4b4c6d99387f6454d228630e32645a13994d2d0cec3b378353f1c\": container with ID starting with b2825444eff4b4c6d99387f6454d228630e32645a13994d2d0cec3b378353f1c not found: ID does not exist" Oct 01 16:34:09 crc kubenswrapper[4869]: I1001 16:34:09.776232 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7g9ld\" (UniqueName: \"kubernetes.io/projected/27f0e6e2-94a5-4313-9c9e-9eacbc971748-kube-api-access-7g9ld\") pod \"27f0e6e2-94a5-4313-9c9e-9eacbc971748\" (UID: \"27f0e6e2-94a5-4313-9c9e-9eacbc971748\") " Oct 01 16:34:09 crc kubenswrapper[4869]: I1001 16:34:09.776296 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jq74b\" (UniqueName: \"kubernetes.io/projected/bd4b603c-ef21-4afa-adcf-0e075976eeef-kube-api-access-jq74b\") pod \"bd4b603c-ef21-4afa-adcf-0e075976eeef\" (UID: \"bd4b603c-ef21-4afa-adcf-0e075976eeef\") " Oct 01 16:34:09 crc kubenswrapper[4869]: I1001 16:34:09.776672 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/17e9486a-5271-47f0-9851-30b1c293f6c7-tls-key-pair\") pod \"nmstate-webhook-6cdbc54649-k6lhz\" (UID: \"17e9486a-5271-47f0-9851-30b1c293f6c7\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-k6lhz" Oct 01 16:34:09 crc kubenswrapper[4869]: I1001 16:34:09.776747 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7c2r6\" (UniqueName: \"kubernetes.io/projected/17e9486a-5271-47f0-9851-30b1c293f6c7-kube-api-access-7c2r6\") pod \"nmstate-webhook-6cdbc54649-k6lhz\" (UID: \"17e9486a-5271-47f0-9851-30b1c293f6c7\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-k6lhz" Oct 01 16:34:09 crc kubenswrapper[4869]: I1001 16:34:09.784427 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd4b603c-ef21-4afa-adcf-0e075976eeef-kube-api-access-jq74b" (OuterVolumeSpecName: "kube-api-access-jq74b") pod "bd4b603c-ef21-4afa-adcf-0e075976eeef" (UID: "bd4b603c-ef21-4afa-adcf-0e075976eeef"). InnerVolumeSpecName "kube-api-access-jq74b". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 16:34:09 crc kubenswrapper[4869]: I1001 16:34:09.786706 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/27f0e6e2-94a5-4313-9c9e-9eacbc971748-kube-api-access-7g9ld" (OuterVolumeSpecName: "kube-api-access-7g9ld") pod "27f0e6e2-94a5-4313-9c9e-9eacbc971748" (UID: "27f0e6e2-94a5-4313-9c9e-9eacbc971748"). InnerVolumeSpecName "kube-api-access-7g9ld". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 16:34:09 crc kubenswrapper[4869]: I1001 16:34:09.796409 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/17e9486a-5271-47f0-9851-30b1c293f6c7-tls-key-pair\") pod \"nmstate-webhook-6cdbc54649-k6lhz\" (UID: \"17e9486a-5271-47f0-9851-30b1c293f6c7\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-k6lhz" Oct 01 16:34:09 crc kubenswrapper[4869]: I1001 16:34:09.804000 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7c2r6\" (UniqueName: \"kubernetes.io/projected/17e9486a-5271-47f0-9851-30b1c293f6c7-kube-api-access-7c2r6\") pod \"nmstate-webhook-6cdbc54649-k6lhz\" (UID: \"17e9486a-5271-47f0-9851-30b1c293f6c7\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-k6lhz" Oct 01 16:34:09 crc kubenswrapper[4869]: I1001 16:34:09.839197 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-fdff9cb8d-n4x26"] Oct 01 16:34:09 crc kubenswrapper[4869]: E1001 16:34:09.845824 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd4b603c-ef21-4afa-adcf-0e075976eeef" containerName="nmstate-operator" Oct 01 16:34:09 crc kubenswrapper[4869]: I1001 16:34:09.845865 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd4b603c-ef21-4afa-adcf-0e075976eeef" containerName="nmstate-operator" Oct 01 16:34:09 crc kubenswrapper[4869]: E1001 16:34:09.845875 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27f0e6e2-94a5-4313-9c9e-9eacbc971748" containerName="kube-rbac-proxy" Oct 01 16:34:09 crc kubenswrapper[4869]: I1001 16:34:09.845882 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="27f0e6e2-94a5-4313-9c9e-9eacbc971748" containerName="kube-rbac-proxy" Oct 01 16:34:09 crc kubenswrapper[4869]: E1001 16:34:09.845919 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27f0e6e2-94a5-4313-9c9e-9eacbc971748" containerName="nmstate-metrics" Oct 01 16:34:09 crc kubenswrapper[4869]: I1001 16:34:09.845925 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="27f0e6e2-94a5-4313-9c9e-9eacbc971748" containerName="nmstate-metrics" Oct 01 16:34:09 crc kubenswrapper[4869]: I1001 16:34:09.846132 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="bd4b603c-ef21-4afa-adcf-0e075976eeef" containerName="nmstate-operator" Oct 01 16:34:09 crc kubenswrapper[4869]: I1001 16:34:09.846145 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="27f0e6e2-94a5-4313-9c9e-9eacbc971748" containerName="kube-rbac-proxy" Oct 01 16:34:09 crc kubenswrapper[4869]: I1001 16:34:09.846163 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="27f0e6e2-94a5-4313-9c9e-9eacbc971748" containerName="nmstate-metrics" Oct 01 16:34:09 crc kubenswrapper[4869]: I1001 16:34:09.847115 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-n4x26" Oct 01 16:34:09 crc kubenswrapper[4869]: I1001 16:34:09.877979 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-fdff9cb8d-n4x26"] Oct 01 16:34:09 crc kubenswrapper[4869]: I1001 16:34:09.882498 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7g9ld\" (UniqueName: \"kubernetes.io/projected/27f0e6e2-94a5-4313-9c9e-9eacbc971748-kube-api-access-7g9ld\") on node \"crc\" DevicePath \"\"" Oct 01 16:34:09 crc kubenswrapper[4869]: I1001 16:34:09.882533 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jq74b\" (UniqueName: \"kubernetes.io/projected/bd4b603c-ef21-4afa-adcf-0e075976eeef-kube-api-access-jq74b\") on node \"crc\" DevicePath \"\"" Oct 01 16:34:09 crc kubenswrapper[4869]: I1001 16:34:09.907495 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-k6lhz" Oct 01 16:34:09 crc kubenswrapper[4869]: I1001 16:34:09.972192 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-6b874cbd85-2mvc5"] Oct 01 16:34:09 crc kubenswrapper[4869]: I1001 16:34:09.985355 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mx2w2\" (UniqueName: \"kubernetes.io/projected/77a31948-6e77-47cd-b110-fa1af1087629-kube-api-access-mx2w2\") pod \"nmstate-metrics-fdff9cb8d-n4x26\" (UID: \"77a31948-6e77-47cd-b110-fa1af1087629\") " pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-n4x26" Oct 01 16:34:10 crc kubenswrapper[4869]: I1001 16:34:10.087564 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mx2w2\" (UniqueName: \"kubernetes.io/projected/77a31948-6e77-47cd-b110-fa1af1087629-kube-api-access-mx2w2\") pod \"nmstate-metrics-fdff9cb8d-n4x26\" (UID: \"77a31948-6e77-47cd-b110-fa1af1087629\") " pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-n4x26" Oct 01 16:34:10 crc kubenswrapper[4869]: I1001 16:34:10.113980 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mx2w2\" (UniqueName: \"kubernetes.io/projected/77a31948-6e77-47cd-b110-fa1af1087629-kube-api-access-mx2w2\") pod \"nmstate-metrics-fdff9cb8d-n4x26\" (UID: \"77a31948-6e77-47cd-b110-fa1af1087629\") " pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-n4x26" Oct 01 16:34:10 crc kubenswrapper[4869]: I1001 16:34:10.189913 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-n4x26" Oct 01 16:34:10 crc kubenswrapper[4869]: I1001 16:34:10.483040 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-6cdbc54649-k6lhz"] Oct 01 16:34:10 crc kubenswrapper[4869]: W1001 16:34:10.491468 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod17e9486a_5271_47f0_9851_30b1c293f6c7.slice/crio-2079527e45be7ccfe03555d6000db51ee8a390f29d5de1ce1ab36df5c1afb9e4 WatchSource:0}: Error finding container 2079527e45be7ccfe03555d6000db51ee8a390f29d5de1ce1ab36df5c1afb9e4: Status 404 returned error can't find the container with id 2079527e45be7ccfe03555d6000db51ee8a390f29d5de1ce1ab36df5c1afb9e4 Oct 01 16:34:10 crc kubenswrapper[4869]: I1001 16:34:10.646861 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-fdff9cb8d-n4x26"] Oct 01 16:34:10 crc kubenswrapper[4869]: I1001 16:34:10.658073 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-p56ll" event={"ID":"fb760180-2040-4f8f-8a57-e8f2fdb6d1ed","Type":"ContainerStarted","Data":"e07a015415f45379e73efb3f7be4bb58b0f5562208489ff93b89e16b4b2b4ca6"} Oct 01 16:34:10 crc kubenswrapper[4869]: I1001 16:34:10.660874 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-k6lhz" event={"ID":"17e9486a-5271-47f0-9851-30b1c293f6c7","Type":"ContainerStarted","Data":"2079527e45be7ccfe03555d6000db51ee8a390f29d5de1ce1ab36df5c1afb9e4"} Oct 01 16:34:10 crc kubenswrapper[4869]: I1001 16:34:10.663056 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-5whxj" event={"ID":"bd4b603c-ef21-4afa-adcf-0e075976eeef","Type":"ContainerDied","Data":"39d8adc701998a2bad3f1f0570cbb1ea8853d704eee6b78556a7bb385527fe90"} Oct 01 16:34:10 crc kubenswrapper[4869]: I1001 16:34:10.663122 4869 scope.go:117] "RemoveContainer" containerID="ae61def0d9c94972a6fd926fffa33a9a7ec9dc0aad61f31c8348d2b23c698eb6" Oct 01 16:34:10 crc kubenswrapper[4869]: I1001 16:34:10.663083 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-5whxj" Oct 01 16:34:10 crc kubenswrapper[4869]: I1001 16:34:10.667540 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-metrics-58fcddf996-bc5gk" Oct 01 16:34:10 crc kubenswrapper[4869]: I1001 16:34:10.667529 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-58fcddf996-bc5gk" event={"ID":"27f0e6e2-94a5-4313-9c9e-9eacbc971748","Type":"ContainerDied","Data":"f77313576443eaa4db14e8cb84e0f7fea96e4e911d19266bc911dedd3de116cf"} Oct 01 16:34:10 crc kubenswrapper[4869]: I1001 16:34:10.669607 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-2mvc5" event={"ID":"a8204861-b466-42de-bda3-448b67dc02f2","Type":"ContainerStarted","Data":"f23b2e45892b71183f585a652f5baace69b5889c59f17f257c0961815dd9d3d6"} Oct 01 16:34:10 crc kubenswrapper[4869]: I1001 16:34:10.700227 4869 scope.go:117] "RemoveContainer" containerID="477d6470d69b9431c4f75da395985b54d2bbb544f32068e427a48f701dc3c101" Oct 01 16:34:10 crc kubenswrapper[4869]: I1001 16:34:10.705435 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-nmstate/nmstate-operator-5d6f6cfd66-5whxj"] Oct 01 16:34:10 crc kubenswrapper[4869]: I1001 16:34:10.714756 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-nmstate/nmstate-operator-5d6f6cfd66-5whxj"] Oct 01 16:34:10 crc kubenswrapper[4869]: I1001 16:34:10.722672 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-nmstate/nmstate-metrics-58fcddf996-bc5gk"] Oct 01 16:34:10 crc kubenswrapper[4869]: I1001 16:34:10.727641 4869 scope.go:117] "RemoveContainer" containerID="c1f9913f8a4df05b50a07411e0e87e257fa37dc49c286ff0c860862ee21fc194" Oct 01 16:34:10 crc kubenswrapper[4869]: I1001 16:34:10.728292 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-nmstate/nmstate-metrics-58fcddf996-bc5gk"] Oct 01 16:34:11 crc kubenswrapper[4869]: I1001 16:34:11.601398 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="27f0e6e2-94a5-4313-9c9e-9eacbc971748" path="/var/lib/kubelet/pods/27f0e6e2-94a5-4313-9c9e-9eacbc971748/volumes" Oct 01 16:34:11 crc kubenswrapper[4869]: I1001 16:34:11.604348 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="441818c5-6e78-4a8f-9ed9-58e7dd4b2028" path="/var/lib/kubelet/pods/441818c5-6e78-4a8f-9ed9-58e7dd4b2028/volumes" Oct 01 16:34:11 crc kubenswrapper[4869]: I1001 16:34:11.604952 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aa4da6ef-158a-44e5-8d1a-779aa19fe3ac" path="/var/lib/kubelet/pods/aa4da6ef-158a-44e5-8d1a-779aa19fe3ac/volumes" Oct 01 16:34:11 crc kubenswrapper[4869]: I1001 16:34:11.606986 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd4b603c-ef21-4afa-adcf-0e075976eeef" path="/var/lib/kubelet/pods/bd4b603c-ef21-4afa-adcf-0e075976eeef/volumes" Oct 01 16:34:11 crc kubenswrapper[4869]: I1001 16:34:11.684892 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-n4x26" event={"ID":"77a31948-6e77-47cd-b110-fa1af1087629","Type":"ContainerStarted","Data":"2bf34ada8e3beee0ce653b94c6639372c5eec65734522b0a65b24c43f7599c1f"} Oct 01 16:34:14 crc kubenswrapper[4869]: I1001 16:34:14.733813 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-2mvc5" event={"ID":"a8204861-b466-42de-bda3-448b67dc02f2","Type":"ContainerStarted","Data":"811a590c829951616326a24396aeaec5e00fd969d6d88c1d380ca58d49bc34eb"} Oct 01 16:34:14 crc kubenswrapper[4869]: I1001 16:34:14.742927 4869 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-k6lhz" event={"ID":"17e9486a-5271-47f0-9851-30b1c293f6c7","Type":"ContainerStarted","Data":"f5a6cc497fa9df55812175699c79f12047d46d0e6efb7806997f0b67a2bafb4f"} Oct 01 16:34:14 crc kubenswrapper[4869]: I1001 16:34:14.743296 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-k6lhz" Oct 01 16:34:14 crc kubenswrapper[4869]: I1001 16:34:14.746072 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-n4x26" event={"ID":"77a31948-6e77-47cd-b110-fa1af1087629","Type":"ContainerStarted","Data":"a4a7f5f9392c90e39e520d43583666f64768a3a841181fc82338ba895cf92620"} Oct 01 16:34:14 crc kubenswrapper[4869]: I1001 16:34:14.752283 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-2mvc5" podStartSLOduration=2.544754439 podStartE2EDuration="6.752266551s" podCreationTimestamp="2025-10-01 16:34:08 +0000 UTC" firstStartedPulling="2025-10-01 16:34:09.998817098 +0000 UTC m=+5359.145660214" lastFinishedPulling="2025-10-01 16:34:14.2063292 +0000 UTC m=+5363.353172326" observedRunningTime="2025-10-01 16:34:14.746113946 +0000 UTC m=+5363.892957072" watchObservedRunningTime="2025-10-01 16:34:14.752266551 +0000 UTC m=+5363.899109667" Oct 01 16:34:14 crc kubenswrapper[4869]: I1001 16:34:14.777621 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-k6lhz" podStartSLOduration=2.056072396 podStartE2EDuration="5.777602883s" podCreationTimestamp="2025-10-01 16:34:09 +0000 UTC" firstStartedPulling="2025-10-01 16:34:10.493042521 +0000 UTC m=+5359.639885637" lastFinishedPulling="2025-10-01 16:34:14.214572988 +0000 UTC m=+5363.361416124" observedRunningTime="2025-10-01 16:34:14.770195735 +0000 UTC m=+5363.917038861" watchObservedRunningTime="2025-10-01 16:34:14.777602883 +0000 UTC m=+5363.924446009" Oct 01 16:34:14 crc kubenswrapper[4869]: I1001 16:34:14.816385 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-864bb6dfb5-zcwb9"] Oct 01 16:34:14 crc kubenswrapper[4869]: I1001 16:34:14.816635 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-zcwb9" podUID="58fa3f46-8eb7-4d4f-9548-37c56f012aba" containerName="nmstate-console-plugin" containerID="cri-o://94c2506b293dfb9320b5125e90948fadbd1858bdd3457fa4c836fd6414aa0591" gracePeriod=30 Oct 01 16:34:15 crc kubenswrapper[4869]: I1001 16:34:15.501494 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-zcwb9" Oct 01 16:34:15 crc kubenswrapper[4869]: I1001 16:34:15.656682 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/58fa3f46-8eb7-4d4f-9548-37c56f012aba-plugin-serving-cert\") pod \"58fa3f46-8eb7-4d4f-9548-37c56f012aba\" (UID: \"58fa3f46-8eb7-4d4f-9548-37c56f012aba\") " Oct 01 16:34:15 crc kubenswrapper[4869]: I1001 16:34:15.656813 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2jvc2\" (UniqueName: \"kubernetes.io/projected/58fa3f46-8eb7-4d4f-9548-37c56f012aba-kube-api-access-2jvc2\") pod \"58fa3f46-8eb7-4d4f-9548-37c56f012aba\" (UID: \"58fa3f46-8eb7-4d4f-9548-37c56f012aba\") " Oct 01 16:34:15 crc kubenswrapper[4869]: I1001 16:34:15.656836 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/58fa3f46-8eb7-4d4f-9548-37c56f012aba-nginx-conf\") pod \"58fa3f46-8eb7-4d4f-9548-37c56f012aba\" (UID: \"58fa3f46-8eb7-4d4f-9548-37c56f012aba\") " Oct 01 16:34:15 crc kubenswrapper[4869]: I1001 16:34:15.662525 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/58fa3f46-8eb7-4d4f-9548-37c56f012aba-plugin-serving-cert" (OuterVolumeSpecName: "plugin-serving-cert") pod "58fa3f46-8eb7-4d4f-9548-37c56f012aba" (UID: "58fa3f46-8eb7-4d4f-9548-37c56f012aba"). InnerVolumeSpecName "plugin-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 16:34:15 crc kubenswrapper[4869]: I1001 16:34:15.667127 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/58fa3f46-8eb7-4d4f-9548-37c56f012aba-kube-api-access-2jvc2" (OuterVolumeSpecName: "kube-api-access-2jvc2") pod "58fa3f46-8eb7-4d4f-9548-37c56f012aba" (UID: "58fa3f46-8eb7-4d4f-9548-37c56f012aba"). InnerVolumeSpecName "kube-api-access-2jvc2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 16:34:15 crc kubenswrapper[4869]: I1001 16:34:15.744027 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/58fa3f46-8eb7-4d4f-9548-37c56f012aba-nginx-conf" (OuterVolumeSpecName: "nginx-conf") pod "58fa3f46-8eb7-4d4f-9548-37c56f012aba" (UID: "58fa3f46-8eb7-4d4f-9548-37c56f012aba"). InnerVolumeSpecName "nginx-conf". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 16:34:15 crc kubenswrapper[4869]: I1001 16:34:15.759223 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2jvc2\" (UniqueName: \"kubernetes.io/projected/58fa3f46-8eb7-4d4f-9548-37c56f012aba-kube-api-access-2jvc2\") on node \"crc\" DevicePath \"\"" Oct 01 16:34:15 crc kubenswrapper[4869]: I1001 16:34:15.759290 4869 reconciler_common.go:293] "Volume detached for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/58fa3f46-8eb7-4d4f-9548-37c56f012aba-nginx-conf\") on node \"crc\" DevicePath \"\"" Oct 01 16:34:15 crc kubenswrapper[4869]: I1001 16:34:15.759306 4869 reconciler_common.go:293] "Volume detached for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/58fa3f46-8eb7-4d4f-9548-37c56f012aba-plugin-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 01 16:34:15 crc kubenswrapper[4869]: I1001 16:34:15.769861 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-p56ll" event={"ID":"fb760180-2040-4f8f-8a57-e8f2fdb6d1ed","Type":"ContainerStarted","Data":"f851b7a0b9a1e00f1b5da6d6eea9e7f0aead20e026ff0620e8a3121d65cdd1ac"} Oct 01 16:34:15 crc kubenswrapper[4869]: I1001 16:34:15.770401 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-p56ll" Oct 01 16:34:15 crc kubenswrapper[4869]: I1001 16:34:15.792571 4869 generic.go:334] "Generic (PLEG): container finished" podID="58fa3f46-8eb7-4d4f-9548-37c56f012aba" containerID="94c2506b293dfb9320b5125e90948fadbd1858bdd3457fa4c836fd6414aa0591" exitCode=0 Oct 01 16:34:15 crc kubenswrapper[4869]: I1001 16:34:15.793543 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-zcwb9" Oct 01 16:34:15 crc kubenswrapper[4869]: I1001 16:34:15.795367 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-zcwb9" event={"ID":"58fa3f46-8eb7-4d4f-9548-37c56f012aba","Type":"ContainerDied","Data":"94c2506b293dfb9320b5125e90948fadbd1858bdd3457fa4c836fd6414aa0591"} Oct 01 16:34:15 crc kubenswrapper[4869]: I1001 16:34:15.795450 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-zcwb9" event={"ID":"58fa3f46-8eb7-4d4f-9548-37c56f012aba","Type":"ContainerDied","Data":"ca496496ffb05a3cae9ffb4d3103f604bf3228d57ed376ba81a98e50d8a36e4f"} Oct 01 16:34:15 crc kubenswrapper[4869]: I1001 16:34:15.795467 4869 scope.go:117] "RemoveContainer" containerID="94c2506b293dfb9320b5125e90948fadbd1858bdd3457fa4c836fd6414aa0591" Oct 01 16:34:15 crc kubenswrapper[4869]: I1001 16:34:15.829878 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-p56ll" podStartSLOduration=2.189612227 podStartE2EDuration="6.829857656s" podCreationTimestamp="2025-10-01 16:34:09 +0000 UTC" firstStartedPulling="2025-10-01 16:34:09.57553695 +0000 UTC m=+5358.722380066" lastFinishedPulling="2025-10-01 16:34:14.215782379 +0000 UTC m=+5363.362625495" observedRunningTime="2025-10-01 16:34:15.828656505 +0000 UTC m=+5364.975499631" watchObservedRunningTime="2025-10-01 16:34:15.829857656 +0000 UTC m=+5364.976700772" Oct 01 16:34:15 crc kubenswrapper[4869]: I1001 16:34:15.854752 4869 scope.go:117] "RemoveContainer" containerID="94c2506b293dfb9320b5125e90948fadbd1858bdd3457fa4c836fd6414aa0591" Oct 01 16:34:15 crc kubenswrapper[4869]: E1001 16:34:15.855754 4869 
log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"94c2506b293dfb9320b5125e90948fadbd1858bdd3457fa4c836fd6414aa0591\": container with ID starting with 94c2506b293dfb9320b5125e90948fadbd1858bdd3457fa4c836fd6414aa0591 not found: ID does not exist" containerID="94c2506b293dfb9320b5125e90948fadbd1858bdd3457fa4c836fd6414aa0591" Oct 01 16:34:15 crc kubenswrapper[4869]: I1001 16:34:15.855798 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"94c2506b293dfb9320b5125e90948fadbd1858bdd3457fa4c836fd6414aa0591"} err="failed to get container status \"94c2506b293dfb9320b5125e90948fadbd1858bdd3457fa4c836fd6414aa0591\": rpc error: code = NotFound desc = could not find container \"94c2506b293dfb9320b5125e90948fadbd1858bdd3457fa4c836fd6414aa0591\": container with ID starting with 94c2506b293dfb9320b5125e90948fadbd1858bdd3457fa4c836fd6414aa0591 not found: ID does not exist" Oct 01 16:34:15 crc kubenswrapper[4869]: I1001 16:34:15.927336 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-864bb6dfb5-zcwb9"] Oct 01 16:34:15 crc kubenswrapper[4869]: I1001 16:34:15.952404 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-864bb6dfb5-zcwb9"] Oct 01 16:34:16 crc kubenswrapper[4869]: E1001 16:34:16.108626 4869 container_log_manager.go:274] "Failed to get container status" err="rpc error: code = NotFound desc = could not find container \"94c2506b293dfb9320b5125e90948fadbd1858bdd3457fa4c836fd6414aa0591\": container with ID starting with 94c2506b293dfb9320b5125e90948fadbd1858bdd3457fa4c836fd6414aa0591 not found: ID does not exist" worker=1 containerID="94c2506b293dfb9320b5125e90948fadbd1858bdd3457fa4c836fd6414aa0591" Oct 01 16:34:17 crc kubenswrapper[4869]: I1001 16:34:17.594139 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="58fa3f46-8eb7-4d4f-9548-37c56f012aba" path="/var/lib/kubelet/pods/58fa3f46-8eb7-4d4f-9548-37c56f012aba/volumes" Oct 01 16:34:18 crc kubenswrapper[4869]: I1001 16:34:18.843952 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-n4x26" event={"ID":"77a31948-6e77-47cd-b110-fa1af1087629","Type":"ContainerStarted","Data":"359448ceeae9936d3cef7dd8bf6b6d1f5d9a156dc01f473c369bb702af1a8caf"} Oct 01 16:34:19 crc kubenswrapper[4869]: I1001 16:34:19.492560 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-p56ll" Oct 01 16:34:19 crc kubenswrapper[4869]: I1001 16:34:19.515281 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-n4x26" podStartSLOduration=3.40849675 podStartE2EDuration="10.515243058s" podCreationTimestamp="2025-10-01 16:34:09 +0000 UTC" firstStartedPulling="2025-10-01 16:34:10.656921081 +0000 UTC m=+5359.803764207" lastFinishedPulling="2025-10-01 16:34:17.763667399 +0000 UTC m=+5366.910510515" observedRunningTime="2025-10-01 16:34:18.867502047 +0000 UTC m=+5368.014345173" watchObservedRunningTime="2025-10-01 16:34:19.515243058 +0000 UTC m=+5368.662086174" Oct 01 16:34:20 crc kubenswrapper[4869]: I1001 16:34:20.785540 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-fc84fcf8c-4ttdv"] Oct 01 16:34:20 crc kubenswrapper[4869]: E1001 16:34:20.786507 4869 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="58fa3f46-8eb7-4d4f-9548-37c56f012aba" containerName="nmstate-console-plugin" Oct 01 16:34:20 crc kubenswrapper[4869]: I1001 16:34:20.786523 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="58fa3f46-8eb7-4d4f-9548-37c56f012aba" containerName="nmstate-console-plugin" Oct 01 16:34:20 crc kubenswrapper[4869]: I1001 16:34:20.786797 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="58fa3f46-8eb7-4d4f-9548-37c56f012aba" containerName="nmstate-console-plugin" Oct 01 16:34:20 crc kubenswrapper[4869]: I1001 16:34:20.787524 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-fc84fcf8c-4ttdv" Oct 01 16:34:20 crc kubenswrapper[4869]: I1001 16:34:20.796552 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-fc84fcf8c-4ttdv"] Oct 01 16:34:20 crc kubenswrapper[4869]: I1001 16:34:20.979312 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/1907f504-e5aa-4cdf-868f-1dbafcb47d83-apiservice-cert\") pod \"metallb-operator-controller-manager-fc84fcf8c-4ttdv\" (UID: \"1907f504-e5aa-4cdf-868f-1dbafcb47d83\") " pod="metallb-system/metallb-operator-controller-manager-fc84fcf8c-4ttdv" Oct 01 16:34:20 crc kubenswrapper[4869]: I1001 16:34:20.979426 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/1907f504-e5aa-4cdf-868f-1dbafcb47d83-webhook-cert\") pod \"metallb-operator-controller-manager-fc84fcf8c-4ttdv\" (UID: \"1907f504-e5aa-4cdf-868f-1dbafcb47d83\") " pod="metallb-system/metallb-operator-controller-manager-fc84fcf8c-4ttdv" Oct 01 16:34:20 crc kubenswrapper[4869]: I1001 16:34:20.979464 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5jz5l\" (UniqueName: \"kubernetes.io/projected/1907f504-e5aa-4cdf-868f-1dbafcb47d83-kube-api-access-5jz5l\") pod \"metallb-operator-controller-manager-fc84fcf8c-4ttdv\" (UID: \"1907f504-e5aa-4cdf-868f-1dbafcb47d83\") " pod="metallb-system/metallb-operator-controller-manager-fc84fcf8c-4ttdv" Oct 01 16:34:21 crc kubenswrapper[4869]: I1001 16:34:21.081246 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5jz5l\" (UniqueName: \"kubernetes.io/projected/1907f504-e5aa-4cdf-868f-1dbafcb47d83-kube-api-access-5jz5l\") pod \"metallb-operator-controller-manager-fc84fcf8c-4ttdv\" (UID: \"1907f504-e5aa-4cdf-868f-1dbafcb47d83\") " pod="metallb-system/metallb-operator-controller-manager-fc84fcf8c-4ttdv" Oct 01 16:34:21 crc kubenswrapper[4869]: I1001 16:34:21.081371 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/1907f504-e5aa-4cdf-868f-1dbafcb47d83-apiservice-cert\") pod \"metallb-operator-controller-manager-fc84fcf8c-4ttdv\" (UID: \"1907f504-e5aa-4cdf-868f-1dbafcb47d83\") " pod="metallb-system/metallb-operator-controller-manager-fc84fcf8c-4ttdv" Oct 01 16:34:21 crc kubenswrapper[4869]: I1001 16:34:21.081459 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/1907f504-e5aa-4cdf-868f-1dbafcb47d83-webhook-cert\") pod \"metallb-operator-controller-manager-fc84fcf8c-4ttdv\" (UID: \"1907f504-e5aa-4cdf-868f-1dbafcb47d83\") " 
pod="metallb-system/metallb-operator-controller-manager-fc84fcf8c-4ttdv" Oct 01 16:34:21 crc kubenswrapper[4869]: I1001 16:34:21.087698 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/1907f504-e5aa-4cdf-868f-1dbafcb47d83-apiservice-cert\") pod \"metallb-operator-controller-manager-fc84fcf8c-4ttdv\" (UID: \"1907f504-e5aa-4cdf-868f-1dbafcb47d83\") " pod="metallb-system/metallb-operator-controller-manager-fc84fcf8c-4ttdv" Oct 01 16:34:21 crc kubenswrapper[4869]: I1001 16:34:21.088010 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/1907f504-e5aa-4cdf-868f-1dbafcb47d83-webhook-cert\") pod \"metallb-operator-controller-manager-fc84fcf8c-4ttdv\" (UID: \"1907f504-e5aa-4cdf-868f-1dbafcb47d83\") " pod="metallb-system/metallb-operator-controller-manager-fc84fcf8c-4ttdv" Oct 01 16:34:21 crc kubenswrapper[4869]: I1001 16:34:21.098765 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5jz5l\" (UniqueName: \"kubernetes.io/projected/1907f504-e5aa-4cdf-868f-1dbafcb47d83-kube-api-access-5jz5l\") pod \"metallb-operator-controller-manager-fc84fcf8c-4ttdv\" (UID: \"1907f504-e5aa-4cdf-868f-1dbafcb47d83\") " pod="metallb-system/metallb-operator-controller-manager-fc84fcf8c-4ttdv" Oct 01 16:34:21 crc kubenswrapper[4869]: I1001 16:34:21.107595 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-fc84fcf8c-4ttdv" Oct 01 16:34:21 crc kubenswrapper[4869]: I1001 16:34:21.137128 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-d479f666c-w8xkz"] Oct 01 16:34:21 crc kubenswrapper[4869]: I1001 16:34:21.138785 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-d479f666c-w8xkz" Oct 01 16:34:21 crc kubenswrapper[4869]: I1001 16:34:21.162244 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-d479f666c-w8xkz"] Oct 01 16:34:21 crc kubenswrapper[4869]: I1001 16:34:21.286321 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/e66dca24-690f-49b3-96a4-f4376279f654-apiservice-cert\") pod \"metallb-operator-webhook-server-d479f666c-w8xkz\" (UID: \"e66dca24-690f-49b3-96a4-f4376279f654\") " pod="metallb-system/metallb-operator-webhook-server-d479f666c-w8xkz" Oct 01 16:34:21 crc kubenswrapper[4869]: I1001 16:34:21.287059 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qqmc9\" (UniqueName: \"kubernetes.io/projected/e66dca24-690f-49b3-96a4-f4376279f654-kube-api-access-qqmc9\") pod \"metallb-operator-webhook-server-d479f666c-w8xkz\" (UID: \"e66dca24-690f-49b3-96a4-f4376279f654\") " pod="metallb-system/metallb-operator-webhook-server-d479f666c-w8xkz" Oct 01 16:34:21 crc kubenswrapper[4869]: I1001 16:34:21.287120 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/e66dca24-690f-49b3-96a4-f4376279f654-webhook-cert\") pod \"metallb-operator-webhook-server-d479f666c-w8xkz\" (UID: \"e66dca24-690f-49b3-96a4-f4376279f654\") " pod="metallb-system/metallb-operator-webhook-server-d479f666c-w8xkz" Oct 01 16:34:21 crc kubenswrapper[4869]: I1001 16:34:21.390056 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/e66dca24-690f-49b3-96a4-f4376279f654-webhook-cert\") pod \"metallb-operator-webhook-server-d479f666c-w8xkz\" (UID: \"e66dca24-690f-49b3-96a4-f4376279f654\") " pod="metallb-system/metallb-operator-webhook-server-d479f666c-w8xkz" Oct 01 16:34:21 crc kubenswrapper[4869]: I1001 16:34:21.390280 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/e66dca24-690f-49b3-96a4-f4376279f654-apiservice-cert\") pod \"metallb-operator-webhook-server-d479f666c-w8xkz\" (UID: \"e66dca24-690f-49b3-96a4-f4376279f654\") " pod="metallb-system/metallb-operator-webhook-server-d479f666c-w8xkz" Oct 01 16:34:21 crc kubenswrapper[4869]: I1001 16:34:21.390347 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qqmc9\" (UniqueName: \"kubernetes.io/projected/e66dca24-690f-49b3-96a4-f4376279f654-kube-api-access-qqmc9\") pod \"metallb-operator-webhook-server-d479f666c-w8xkz\" (UID: \"e66dca24-690f-49b3-96a4-f4376279f654\") " pod="metallb-system/metallb-operator-webhook-server-d479f666c-w8xkz" Oct 01 16:34:21 crc kubenswrapper[4869]: I1001 16:34:21.402383 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/e66dca24-690f-49b3-96a4-f4376279f654-apiservice-cert\") pod \"metallb-operator-webhook-server-d479f666c-w8xkz\" (UID: \"e66dca24-690f-49b3-96a4-f4376279f654\") " pod="metallb-system/metallb-operator-webhook-server-d479f666c-w8xkz" Oct 01 16:34:21 crc kubenswrapper[4869]: I1001 16:34:21.402824 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: 
\"kubernetes.io/secret/e66dca24-690f-49b3-96a4-f4376279f654-webhook-cert\") pod \"metallb-operator-webhook-server-d479f666c-w8xkz\" (UID: \"e66dca24-690f-49b3-96a4-f4376279f654\") " pod="metallb-system/metallb-operator-webhook-server-d479f666c-w8xkz" Oct 01 16:34:21 crc kubenswrapper[4869]: I1001 16:34:21.411897 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qqmc9\" (UniqueName: \"kubernetes.io/projected/e66dca24-690f-49b3-96a4-f4376279f654-kube-api-access-qqmc9\") pod \"metallb-operator-webhook-server-d479f666c-w8xkz\" (UID: \"e66dca24-690f-49b3-96a4-f4376279f654\") " pod="metallb-system/metallb-operator-webhook-server-d479f666c-w8xkz" Oct 01 16:34:21 crc kubenswrapper[4869]: I1001 16:34:21.542160 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-d479f666c-w8xkz" Oct 01 16:34:21 crc kubenswrapper[4869]: I1001 16:34:21.657459 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-fc84fcf8c-4ttdv"] Oct 01 16:34:21 crc kubenswrapper[4869]: W1001 16:34:21.662704 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1907f504_e5aa_4cdf_868f_1dbafcb47d83.slice/crio-19e8b25af2481a48186cd9a49d9a038303479aae8af5242aecc99c52d92d244e WatchSource:0}: Error finding container 19e8b25af2481a48186cd9a49d9a038303479aae8af5242aecc99c52d92d244e: Status 404 returned error can't find the container with id 19e8b25af2481a48186cd9a49d9a038303479aae8af5242aecc99c52d92d244e Oct 01 16:34:21 crc kubenswrapper[4869]: I1001 16:34:21.873805 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-fc84fcf8c-4ttdv" event={"ID":"1907f504-e5aa-4cdf-868f-1dbafcb47d83","Type":"ContainerStarted","Data":"19e8b25af2481a48186cd9a49d9a038303479aae8af5242aecc99c52d92d244e"} Oct 01 16:34:22 crc kubenswrapper[4869]: I1001 16:34:22.122924 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-d479f666c-w8xkz"] Oct 01 16:34:22 crc kubenswrapper[4869]: W1001 16:34:22.131989 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode66dca24_690f_49b3_96a4_f4376279f654.slice/crio-645c0fdc0a6fd5ec4893d5fe078b67e684557cd2ce5f679e5e6265d369fe5f81 WatchSource:0}: Error finding container 645c0fdc0a6fd5ec4893d5fe078b67e684557cd2ce5f679e5e6265d369fe5f81: Status 404 returned error can't find the container with id 645c0fdc0a6fd5ec4893d5fe078b67e684557cd2ce5f679e5e6265d369fe5f81 Oct 01 16:34:22 crc kubenswrapper[4869]: I1001 16:34:22.884814 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-d479f666c-w8xkz" event={"ID":"e66dca24-690f-49b3-96a4-f4376279f654","Type":"ContainerStarted","Data":"645c0fdc0a6fd5ec4893d5fe078b67e684557cd2ce5f679e5e6265d369fe5f81"} Oct 01 16:34:29 crc kubenswrapper[4869]: I1001 16:34:29.914062 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-k6lhz" Oct 01 16:34:29 crc kubenswrapper[4869]: I1001 16:34:29.987840 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-d479f666c-w8xkz" 
event={"ID":"e66dca24-690f-49b3-96a4-f4376279f654","Type":"ContainerStarted","Data":"039b232b84bada219fa975e014c7548941f0a6943821cff60b9b0b903263e17c"} Oct 01 16:34:29 crc kubenswrapper[4869]: I1001 16:34:29.987915 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-d479f666c-w8xkz" Oct 01 16:34:29 crc kubenswrapper[4869]: I1001 16:34:29.989957 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-fc84fcf8c-4ttdv" event={"ID":"1907f504-e5aa-4cdf-868f-1dbafcb47d83","Type":"ContainerStarted","Data":"74a2fc84c27e883887ebf93e386bf124f1e746f539f38510342185271b0acca1"} Oct 01 16:34:29 crc kubenswrapper[4869]: I1001 16:34:29.990060 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-fc84fcf8c-4ttdv" Oct 01 16:34:30 crc kubenswrapper[4869]: I1001 16:34:30.012669 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-d479f666c-w8xkz" podStartSLOduration=1.615771901 podStartE2EDuration="9.012649966s" podCreationTimestamp="2025-10-01 16:34:21 +0000 UTC" firstStartedPulling="2025-10-01 16:34:22.135692835 +0000 UTC m=+5371.282535951" lastFinishedPulling="2025-10-01 16:34:29.5325709 +0000 UTC m=+5378.679414016" observedRunningTime="2025-10-01 16:34:30.009505316 +0000 UTC m=+5379.156348492" watchObservedRunningTime="2025-10-01 16:34:30.012649966 +0000 UTC m=+5379.159493082" Oct 01 16:34:30 crc kubenswrapper[4869]: I1001 16:34:30.041119 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-fc84fcf8c-4ttdv" podStartSLOduration=5.889590283 podStartE2EDuration="10.041098486s" podCreationTimestamp="2025-10-01 16:34:20 +0000 UTC" firstStartedPulling="2025-10-01 16:34:21.673514364 +0000 UTC m=+5370.820357480" lastFinishedPulling="2025-10-01 16:34:25.825022567 +0000 UTC m=+5374.971865683" observedRunningTime="2025-10-01 16:34:30.03454593 +0000 UTC m=+5379.181389046" watchObservedRunningTime="2025-10-01 16:34:30.041098486 +0000 UTC m=+5379.187941602" Oct 01 16:34:41 crc kubenswrapper[4869]: I1001 16:34:41.549997 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-d479f666c-w8xkz" Oct 01 16:34:41 crc kubenswrapper[4869]: I1001 16:34:41.663454 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["metallb-system/metallb-operator-webhook-server-96958c6f-4zrjt"] Oct 01 16:34:41 crc kubenswrapper[4869]: I1001 16:34:41.663847 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="metallb-system/metallb-operator-webhook-server-96958c6f-4zrjt" podUID="112154b6-526a-4e35-b3de-f3b95835eb03" containerName="webhook-server" containerID="cri-o://23c4c66704dd1afe5db5d785eedbff6303b0bddf5a7ebf7326d38bcf84adbf5a" gracePeriod=2 Oct 01 16:34:41 crc kubenswrapper[4869]: I1001 16:34:41.687933 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["metallb-system/metallb-operator-webhook-server-96958c6f-4zrjt"] Oct 01 16:34:42 crc kubenswrapper[4869]: I1001 16:34:42.138896 4869 generic.go:334] "Generic (PLEG): container finished" podID="112154b6-526a-4e35-b3de-f3b95835eb03" containerID="23c4c66704dd1afe5db5d785eedbff6303b0bddf5a7ebf7326d38bcf84adbf5a" exitCode=0 Oct 01 16:34:42 crc kubenswrapper[4869]: I1001 16:34:42.347132 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-96958c6f-4zrjt" Oct 01 16:34:42 crc kubenswrapper[4869]: I1001 16:34:42.420993 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2rg6w\" (UniqueName: \"kubernetes.io/projected/112154b6-526a-4e35-b3de-f3b95835eb03-kube-api-access-2rg6w\") pod \"112154b6-526a-4e35-b3de-f3b95835eb03\" (UID: \"112154b6-526a-4e35-b3de-f3b95835eb03\") " Oct 01 16:34:42 crc kubenswrapper[4869]: I1001 16:34:42.421076 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/112154b6-526a-4e35-b3de-f3b95835eb03-apiservice-cert\") pod \"112154b6-526a-4e35-b3de-f3b95835eb03\" (UID: \"112154b6-526a-4e35-b3de-f3b95835eb03\") " Oct 01 16:34:42 crc kubenswrapper[4869]: I1001 16:34:42.421188 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/112154b6-526a-4e35-b3de-f3b95835eb03-webhook-cert\") pod \"112154b6-526a-4e35-b3de-f3b95835eb03\" (UID: \"112154b6-526a-4e35-b3de-f3b95835eb03\") " Oct 01 16:34:42 crc kubenswrapper[4869]: I1001 16:34:42.434427 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/112154b6-526a-4e35-b3de-f3b95835eb03-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "112154b6-526a-4e35-b3de-f3b95835eb03" (UID: "112154b6-526a-4e35-b3de-f3b95835eb03"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 16:34:42 crc kubenswrapper[4869]: I1001 16:34:42.440202 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/112154b6-526a-4e35-b3de-f3b95835eb03-kube-api-access-2rg6w" (OuterVolumeSpecName: "kube-api-access-2rg6w") pod "112154b6-526a-4e35-b3de-f3b95835eb03" (UID: "112154b6-526a-4e35-b3de-f3b95835eb03"). InnerVolumeSpecName "kube-api-access-2rg6w". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 16:34:42 crc kubenswrapper[4869]: I1001 16:34:42.440405 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/112154b6-526a-4e35-b3de-f3b95835eb03-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "112154b6-526a-4e35-b3de-f3b95835eb03" (UID: "112154b6-526a-4e35-b3de-f3b95835eb03"). InnerVolumeSpecName "webhook-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 16:34:42 crc kubenswrapper[4869]: I1001 16:34:42.524527 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2rg6w\" (UniqueName: \"kubernetes.io/projected/112154b6-526a-4e35-b3de-f3b95835eb03-kube-api-access-2rg6w\") on node \"crc\" DevicePath \"\"" Oct 01 16:34:42 crc kubenswrapper[4869]: I1001 16:34:42.524570 4869 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/112154b6-526a-4e35-b3de-f3b95835eb03-apiservice-cert\") on node \"crc\" DevicePath \"\"" Oct 01 16:34:42 crc kubenswrapper[4869]: I1001 16:34:42.524579 4869 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/112154b6-526a-4e35-b3de-f3b95835eb03-webhook-cert\") on node \"crc\" DevicePath \"\"" Oct 01 16:34:43 crc kubenswrapper[4869]: I1001 16:34:43.149826 4869 scope.go:117] "RemoveContainer" containerID="23c4c66704dd1afe5db5d785eedbff6303b0bddf5a7ebf7326d38bcf84adbf5a" Oct 01 16:34:43 crc kubenswrapper[4869]: I1001 16:34:43.149877 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-96958c6f-4zrjt" Oct 01 16:34:43 crc kubenswrapper[4869]: I1001 16:34:43.354493 4869 patch_prober.go:28] interesting pod/machine-config-daemon-c86m8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 16:34:43 crc kubenswrapper[4869]: I1001 16:34:43.354556 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 16:34:43 crc kubenswrapper[4869]: I1001 16:34:43.593077 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="112154b6-526a-4e35-b3de-f3b95835eb03" path="/var/lib/kubelet/pods/112154b6-526a-4e35-b3de-f3b95835eb03/volumes" Oct 01 16:35:01 crc kubenswrapper[4869]: I1001 16:35:01.111503 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-fc84fcf8c-4ttdv" Oct 01 16:35:01 crc kubenswrapper[4869]: I1001 16:35:01.195135 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["metallb-system/metallb-operator-controller-manager-56886c7897-6kmth"] Oct 01 16:35:01 crc kubenswrapper[4869]: I1001 16:35:01.196269 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="metallb-system/metallb-operator-controller-manager-56886c7897-6kmth" podUID="2ad6ecdc-44a8-4c62-89e8-ca70878847a5" containerName="manager" containerID="cri-o://bf7560e390e8e04e0ca810a1f6f18104760366687a62624bb10bbb11f1b4ea4c" gracePeriod=10 Oct 01 16:35:01 crc kubenswrapper[4869]: I1001 16:35:01.943500 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-56886c7897-6kmth" Oct 01 16:35:02 crc kubenswrapper[4869]: I1001 16:35:02.012066 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/2ad6ecdc-44a8-4c62-89e8-ca70878847a5-apiservice-cert\") pod \"2ad6ecdc-44a8-4c62-89e8-ca70878847a5\" (UID: \"2ad6ecdc-44a8-4c62-89e8-ca70878847a5\") " Oct 01 16:35:02 crc kubenswrapper[4869]: I1001 16:35:02.012221 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h6lgt\" (UniqueName: \"kubernetes.io/projected/2ad6ecdc-44a8-4c62-89e8-ca70878847a5-kube-api-access-h6lgt\") pod \"2ad6ecdc-44a8-4c62-89e8-ca70878847a5\" (UID: \"2ad6ecdc-44a8-4c62-89e8-ca70878847a5\") " Oct 01 16:35:02 crc kubenswrapper[4869]: I1001 16:35:02.012268 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/2ad6ecdc-44a8-4c62-89e8-ca70878847a5-webhook-cert\") pod \"2ad6ecdc-44a8-4c62-89e8-ca70878847a5\" (UID: \"2ad6ecdc-44a8-4c62-89e8-ca70878847a5\") " Oct 01 16:35:02 crc kubenswrapper[4869]: I1001 16:35:02.018774 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2ad6ecdc-44a8-4c62-89e8-ca70878847a5-kube-api-access-h6lgt" (OuterVolumeSpecName: "kube-api-access-h6lgt") pod "2ad6ecdc-44a8-4c62-89e8-ca70878847a5" (UID: "2ad6ecdc-44a8-4c62-89e8-ca70878847a5"). InnerVolumeSpecName "kube-api-access-h6lgt". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 16:35:02 crc kubenswrapper[4869]: I1001 16:35:02.021233 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2ad6ecdc-44a8-4c62-89e8-ca70878847a5-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "2ad6ecdc-44a8-4c62-89e8-ca70878847a5" (UID: "2ad6ecdc-44a8-4c62-89e8-ca70878847a5"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 16:35:02 crc kubenswrapper[4869]: I1001 16:35:02.023449 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2ad6ecdc-44a8-4c62-89e8-ca70878847a5-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "2ad6ecdc-44a8-4c62-89e8-ca70878847a5" (UID: "2ad6ecdc-44a8-4c62-89e8-ca70878847a5"). InnerVolumeSpecName "webhook-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 16:35:02 crc kubenswrapper[4869]: I1001 16:35:02.115494 4869 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/2ad6ecdc-44a8-4c62-89e8-ca70878847a5-apiservice-cert\") on node \"crc\" DevicePath \"\"" Oct 01 16:35:02 crc kubenswrapper[4869]: I1001 16:35:02.115532 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h6lgt\" (UniqueName: \"kubernetes.io/projected/2ad6ecdc-44a8-4c62-89e8-ca70878847a5-kube-api-access-h6lgt\") on node \"crc\" DevicePath \"\"" Oct 01 16:35:02 crc kubenswrapper[4869]: I1001 16:35:02.115546 4869 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/2ad6ecdc-44a8-4c62-89e8-ca70878847a5-webhook-cert\") on node \"crc\" DevicePath \"\"" Oct 01 16:35:02 crc kubenswrapper[4869]: I1001 16:35:02.349098 4869 generic.go:334] "Generic (PLEG): container finished" podID="2ad6ecdc-44a8-4c62-89e8-ca70878847a5" containerID="bf7560e390e8e04e0ca810a1f6f18104760366687a62624bb10bbb11f1b4ea4c" exitCode=0 Oct 01 16:35:02 crc kubenswrapper[4869]: I1001 16:35:02.349145 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-56886c7897-6kmth" event={"ID":"2ad6ecdc-44a8-4c62-89e8-ca70878847a5","Type":"ContainerDied","Data":"bf7560e390e8e04e0ca810a1f6f18104760366687a62624bb10bbb11f1b4ea4c"} Oct 01 16:35:02 crc kubenswrapper[4869]: I1001 16:35:02.349170 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-56886c7897-6kmth" Oct 01 16:35:02 crc kubenswrapper[4869]: I1001 16:35:02.349192 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-56886c7897-6kmth" event={"ID":"2ad6ecdc-44a8-4c62-89e8-ca70878847a5","Type":"ContainerDied","Data":"310acf8bf9d554fa38cad5b7adeb6f587de06f3d3f8a9479056f026e75a6f2e7"} Oct 01 16:35:02 crc kubenswrapper[4869]: I1001 16:35:02.349214 4869 scope.go:117] "RemoveContainer" containerID="bf7560e390e8e04e0ca810a1f6f18104760366687a62624bb10bbb11f1b4ea4c" Oct 01 16:35:02 crc kubenswrapper[4869]: I1001 16:35:02.375559 4869 scope.go:117] "RemoveContainer" containerID="bf7560e390e8e04e0ca810a1f6f18104760366687a62624bb10bbb11f1b4ea4c" Oct 01 16:35:02 crc kubenswrapper[4869]: E1001 16:35:02.376006 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bf7560e390e8e04e0ca810a1f6f18104760366687a62624bb10bbb11f1b4ea4c\": container with ID starting with bf7560e390e8e04e0ca810a1f6f18104760366687a62624bb10bbb11f1b4ea4c not found: ID does not exist" containerID="bf7560e390e8e04e0ca810a1f6f18104760366687a62624bb10bbb11f1b4ea4c" Oct 01 16:35:02 crc kubenswrapper[4869]: I1001 16:35:02.376047 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bf7560e390e8e04e0ca810a1f6f18104760366687a62624bb10bbb11f1b4ea4c"} err="failed to get container status \"bf7560e390e8e04e0ca810a1f6f18104760366687a62624bb10bbb11f1b4ea4c\": rpc error: code = NotFound desc = could not find container \"bf7560e390e8e04e0ca810a1f6f18104760366687a62624bb10bbb11f1b4ea4c\": container with ID starting with bf7560e390e8e04e0ca810a1f6f18104760366687a62624bb10bbb11f1b4ea4c not found: ID does not exist" Oct 01 16:35:02 crc kubenswrapper[4869]: I1001 16:35:02.385071 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["metallb-system/metallb-operator-controller-manager-56886c7897-6kmth"] Oct 01 16:35:02 crc kubenswrapper[4869]: I1001 16:35:02.393217 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["metallb-system/metallb-operator-controller-manager-56886c7897-6kmth"] Oct 01 16:35:03 crc kubenswrapper[4869]: I1001 16:35:03.592963 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2ad6ecdc-44a8-4c62-89e8-ca70878847a5" path="/var/lib/kubelet/pods/2ad6ecdc-44a8-4c62-89e8-ca70878847a5/volumes" Oct 01 16:35:09 crc kubenswrapper[4869]: I1001 16:35:09.814994 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-ddc944bf4-hrp74"] Oct 01 16:35:09 crc kubenswrapper[4869]: E1001 16:35:09.815789 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2ad6ecdc-44a8-4c62-89e8-ca70878847a5" containerName="manager" Oct 01 16:35:09 crc kubenswrapper[4869]: I1001 16:35:09.815801 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="2ad6ecdc-44a8-4c62-89e8-ca70878847a5" containerName="manager" Oct 01 16:35:09 crc kubenswrapper[4869]: E1001 16:35:09.815824 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="112154b6-526a-4e35-b3de-f3b95835eb03" containerName="webhook-server" Oct 01 16:35:09 crc kubenswrapper[4869]: I1001 16:35:09.815829 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="112154b6-526a-4e35-b3de-f3b95835eb03" containerName="webhook-server" Oct 01 16:35:09 crc kubenswrapper[4869]: I1001 16:35:09.816019 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="112154b6-526a-4e35-b3de-f3b95835eb03" containerName="webhook-server" Oct 01 16:35:09 crc kubenswrapper[4869]: I1001 16:35:09.816035 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="2ad6ecdc-44a8-4c62-89e8-ca70878847a5" containerName="manager" Oct 01 16:35:09 crc kubenswrapper[4869]: I1001 16:35:09.816672 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-ddc944bf4-hrp74" Oct 01 16:35:09 crc kubenswrapper[4869]: I1001 16:35:09.843836 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-ddc944bf4-hrp74"] Oct 01 16:35:09 crc kubenswrapper[4869]: I1001 16:35:09.900461 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/77b7849a-6b82-4c2c-a23b-f5dd31c16a9f-webhook-cert\") pod \"metallb-operator-controller-manager-ddc944bf4-hrp74\" (UID: \"77b7849a-6b82-4c2c-a23b-f5dd31c16a9f\") " pod="metallb-system/metallb-operator-controller-manager-ddc944bf4-hrp74" Oct 01 16:35:09 crc kubenswrapper[4869]: I1001 16:35:09.900544 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/77b7849a-6b82-4c2c-a23b-f5dd31c16a9f-apiservice-cert\") pod \"metallb-operator-controller-manager-ddc944bf4-hrp74\" (UID: \"77b7849a-6b82-4c2c-a23b-f5dd31c16a9f\") " pod="metallb-system/metallb-operator-controller-manager-ddc944bf4-hrp74" Oct 01 16:35:09 crc kubenswrapper[4869]: I1001 16:35:09.900583 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qz5kn\" (UniqueName: \"kubernetes.io/projected/77b7849a-6b82-4c2c-a23b-f5dd31c16a9f-kube-api-access-qz5kn\") pod \"metallb-operator-controller-manager-ddc944bf4-hrp74\" (UID: \"77b7849a-6b82-4c2c-a23b-f5dd31c16a9f\") " pod="metallb-system/metallb-operator-controller-manager-ddc944bf4-hrp74" Oct 01 16:35:10 crc kubenswrapper[4869]: I1001 16:35:10.002758 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/77b7849a-6b82-4c2c-a23b-f5dd31c16a9f-webhook-cert\") pod \"metallb-operator-controller-manager-ddc944bf4-hrp74\" (UID: \"77b7849a-6b82-4c2c-a23b-f5dd31c16a9f\") " pod="metallb-system/metallb-operator-controller-manager-ddc944bf4-hrp74" Oct 01 16:35:10 crc kubenswrapper[4869]: I1001 16:35:10.002834 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/77b7849a-6b82-4c2c-a23b-f5dd31c16a9f-apiservice-cert\") pod \"metallb-operator-controller-manager-ddc944bf4-hrp74\" (UID: \"77b7849a-6b82-4c2c-a23b-f5dd31c16a9f\") " pod="metallb-system/metallb-operator-controller-manager-ddc944bf4-hrp74" Oct 01 16:35:10 crc kubenswrapper[4869]: I1001 16:35:10.002882 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qz5kn\" (UniqueName: \"kubernetes.io/projected/77b7849a-6b82-4c2c-a23b-f5dd31c16a9f-kube-api-access-qz5kn\") pod \"metallb-operator-controller-manager-ddc944bf4-hrp74\" (UID: \"77b7849a-6b82-4c2c-a23b-f5dd31c16a9f\") " pod="metallb-system/metallb-operator-controller-manager-ddc944bf4-hrp74" Oct 01 16:35:10 crc kubenswrapper[4869]: I1001 16:35:10.008786 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/77b7849a-6b82-4c2c-a23b-f5dd31c16a9f-apiservice-cert\") pod \"metallb-operator-controller-manager-ddc944bf4-hrp74\" (UID: \"77b7849a-6b82-4c2c-a23b-f5dd31c16a9f\") " pod="metallb-system/metallb-operator-controller-manager-ddc944bf4-hrp74" Oct 01 16:35:10 crc kubenswrapper[4869]: I1001 16:35:10.011951 4869 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/77b7849a-6b82-4c2c-a23b-f5dd31c16a9f-webhook-cert\") pod \"metallb-operator-controller-manager-ddc944bf4-hrp74\" (UID: \"77b7849a-6b82-4c2c-a23b-f5dd31c16a9f\") " pod="metallb-system/metallb-operator-controller-manager-ddc944bf4-hrp74" Oct 01 16:35:10 crc kubenswrapper[4869]: I1001 16:35:10.029696 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qz5kn\" (UniqueName: \"kubernetes.io/projected/77b7849a-6b82-4c2c-a23b-f5dd31c16a9f-kube-api-access-qz5kn\") pod \"metallb-operator-controller-manager-ddc944bf4-hrp74\" (UID: \"77b7849a-6b82-4c2c-a23b-f5dd31c16a9f\") " pod="metallb-system/metallb-operator-controller-manager-ddc944bf4-hrp74" Oct 01 16:35:10 crc kubenswrapper[4869]: I1001 16:35:10.133495 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-ddc944bf4-hrp74" Oct 01 16:35:10 crc kubenswrapper[4869]: I1001 16:35:10.145826 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-869db94fcd-5ccjx"] Oct 01 16:35:10 crc kubenswrapper[4869]: I1001 16:35:10.147386 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-869db94fcd-5ccjx" Oct 01 16:35:10 crc kubenswrapper[4869]: I1001 16:35:10.159619 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-869db94fcd-5ccjx"] Oct 01 16:35:10 crc kubenswrapper[4869]: I1001 16:35:10.210993 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/cff5eb0a-37a8-473e-94b2-384c18f64054-webhook-cert\") pod \"metallb-operator-webhook-server-869db94fcd-5ccjx\" (UID: \"cff5eb0a-37a8-473e-94b2-384c18f64054\") " pod="metallb-system/metallb-operator-webhook-server-869db94fcd-5ccjx" Oct 01 16:35:10 crc kubenswrapper[4869]: I1001 16:35:10.211382 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/cff5eb0a-37a8-473e-94b2-384c18f64054-apiservice-cert\") pod \"metallb-operator-webhook-server-869db94fcd-5ccjx\" (UID: \"cff5eb0a-37a8-473e-94b2-384c18f64054\") " pod="metallb-system/metallb-operator-webhook-server-869db94fcd-5ccjx" Oct 01 16:35:10 crc kubenswrapper[4869]: I1001 16:35:10.211441 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vrd7c\" (UniqueName: \"kubernetes.io/projected/cff5eb0a-37a8-473e-94b2-384c18f64054-kube-api-access-vrd7c\") pod \"metallb-operator-webhook-server-869db94fcd-5ccjx\" (UID: \"cff5eb0a-37a8-473e-94b2-384c18f64054\") " pod="metallb-system/metallb-operator-webhook-server-869db94fcd-5ccjx" Oct 01 16:35:10 crc kubenswrapper[4869]: I1001 16:35:10.320612 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/cff5eb0a-37a8-473e-94b2-384c18f64054-webhook-cert\") pod \"metallb-operator-webhook-server-869db94fcd-5ccjx\" (UID: \"cff5eb0a-37a8-473e-94b2-384c18f64054\") " pod="metallb-system/metallb-operator-webhook-server-869db94fcd-5ccjx" Oct 01 16:35:10 crc kubenswrapper[4869]: I1001 16:35:10.320712 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: 
\"kubernetes.io/secret/cff5eb0a-37a8-473e-94b2-384c18f64054-apiservice-cert\") pod \"metallb-operator-webhook-server-869db94fcd-5ccjx\" (UID: \"cff5eb0a-37a8-473e-94b2-384c18f64054\") " pod="metallb-system/metallb-operator-webhook-server-869db94fcd-5ccjx" Oct 01 16:35:10 crc kubenswrapper[4869]: I1001 16:35:10.320820 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vrd7c\" (UniqueName: \"kubernetes.io/projected/cff5eb0a-37a8-473e-94b2-384c18f64054-kube-api-access-vrd7c\") pod \"metallb-operator-webhook-server-869db94fcd-5ccjx\" (UID: \"cff5eb0a-37a8-473e-94b2-384c18f64054\") " pod="metallb-system/metallb-operator-webhook-server-869db94fcd-5ccjx" Oct 01 16:35:10 crc kubenswrapper[4869]: I1001 16:35:10.329786 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/cff5eb0a-37a8-473e-94b2-384c18f64054-webhook-cert\") pod \"metallb-operator-webhook-server-869db94fcd-5ccjx\" (UID: \"cff5eb0a-37a8-473e-94b2-384c18f64054\") " pod="metallb-system/metallb-operator-webhook-server-869db94fcd-5ccjx" Oct 01 16:35:10 crc kubenswrapper[4869]: I1001 16:35:10.334751 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/cff5eb0a-37a8-473e-94b2-384c18f64054-apiservice-cert\") pod \"metallb-operator-webhook-server-869db94fcd-5ccjx\" (UID: \"cff5eb0a-37a8-473e-94b2-384c18f64054\") " pod="metallb-system/metallb-operator-webhook-server-869db94fcd-5ccjx" Oct 01 16:35:10 crc kubenswrapper[4869]: I1001 16:35:10.342121 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vrd7c\" (UniqueName: \"kubernetes.io/projected/cff5eb0a-37a8-473e-94b2-384c18f64054-kube-api-access-vrd7c\") pod \"metallb-operator-webhook-server-869db94fcd-5ccjx\" (UID: \"cff5eb0a-37a8-473e-94b2-384c18f64054\") " pod="metallb-system/metallb-operator-webhook-server-869db94fcd-5ccjx" Oct 01 16:35:10 crc kubenswrapper[4869]: I1001 16:35:10.467583 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-869db94fcd-5ccjx" Oct 01 16:35:10 crc kubenswrapper[4869]: W1001 16:35:10.700998 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod77b7849a_6b82_4c2c_a23b_f5dd31c16a9f.slice/crio-0c2a8ae9bceb21e3535ce181d52df6bd8fc8e6b856bd542211524ee9a4d890e6 WatchSource:0}: Error finding container 0c2a8ae9bceb21e3535ce181d52df6bd8fc8e6b856bd542211524ee9a4d890e6: Status 404 returned error can't find the container with id 0c2a8ae9bceb21e3535ce181d52df6bd8fc8e6b856bd542211524ee9a4d890e6 Oct 01 16:35:10 crc kubenswrapper[4869]: I1001 16:35:10.701360 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-ddc944bf4-hrp74"] Oct 01 16:35:10 crc kubenswrapper[4869]: W1001 16:35:10.987399 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcff5eb0a_37a8_473e_94b2_384c18f64054.slice/crio-24bf792ab04e5b48671edc5816d4d4ebc934be709f098d60fed57e469aafad28 WatchSource:0}: Error finding container 24bf792ab04e5b48671edc5816d4d4ebc934be709f098d60fed57e469aafad28: Status 404 returned error can't find the container with id 24bf792ab04e5b48671edc5816d4d4ebc934be709f098d60fed57e469aafad28 Oct 01 16:35:10 crc kubenswrapper[4869]: I1001 16:35:10.992873 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-869db94fcd-5ccjx"] Oct 01 16:35:11 crc kubenswrapper[4869]: I1001 16:35:11.449995 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-869db94fcd-5ccjx" event={"ID":"cff5eb0a-37a8-473e-94b2-384c18f64054","Type":"ContainerStarted","Data":"a5eacc5bd37e62532fd02789e2f41bc94990d888333a528a9a753e44b3a79b1d"} Oct 01 16:35:11 crc kubenswrapper[4869]: I1001 16:35:11.450357 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-869db94fcd-5ccjx" Oct 01 16:35:11 crc kubenswrapper[4869]: I1001 16:35:11.450372 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-869db94fcd-5ccjx" event={"ID":"cff5eb0a-37a8-473e-94b2-384c18f64054","Type":"ContainerStarted","Data":"24bf792ab04e5b48671edc5816d4d4ebc934be709f098d60fed57e469aafad28"} Oct 01 16:35:11 crc kubenswrapper[4869]: I1001 16:35:11.452198 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-ddc944bf4-hrp74" event={"ID":"77b7849a-6b82-4c2c-a23b-f5dd31c16a9f","Type":"ContainerStarted","Data":"c76219e784fb0f7afdc89256e131ec35fe4e56bacece214a75a2aab9f0af9712"} Oct 01 16:35:11 crc kubenswrapper[4869]: I1001 16:35:11.452250 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-ddc944bf4-hrp74" event={"ID":"77b7849a-6b82-4c2c-a23b-f5dd31c16a9f","Type":"ContainerStarted","Data":"0c2a8ae9bceb21e3535ce181d52df6bd8fc8e6b856bd542211524ee9a4d890e6"} Oct 01 16:35:11 crc kubenswrapper[4869]: I1001 16:35:11.452313 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-ddc944bf4-hrp74" Oct 01 16:35:11 crc kubenswrapper[4869]: I1001 16:35:11.485124 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-869db94fcd-5ccjx" 
podStartSLOduration=1.485102656 podStartE2EDuration="1.485102656s" podCreationTimestamp="2025-10-01 16:35:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 16:35:11.471444451 +0000 UTC m=+5420.618287577" watchObservedRunningTime="2025-10-01 16:35:11.485102656 +0000 UTC m=+5420.631945782" Oct 01 16:35:11 crc kubenswrapper[4869]: I1001 16:35:11.502328 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-ddc944bf4-hrp74" podStartSLOduration=2.502305882 podStartE2EDuration="2.502305882s" podCreationTimestamp="2025-10-01 16:35:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 16:35:11.497679155 +0000 UTC m=+5420.644522281" watchObservedRunningTime="2025-10-01 16:35:11.502305882 +0000 UTC m=+5420.649149008" Oct 01 16:35:13 crc kubenswrapper[4869]: I1001 16:35:13.354220 4869 patch_prober.go:28] interesting pod/machine-config-daemon-c86m8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 16:35:13 crc kubenswrapper[4869]: I1001 16:35:13.354701 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 16:35:19 crc kubenswrapper[4869]: I1001 16:35:19.343783 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["metallb-system/frr-k8s-vrp9g"] Oct 01 16:35:19 crc kubenswrapper[4869]: I1001 16:35:19.344849 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="metallb-system/frr-k8s-vrp9g" podUID="38150466-9f4a-4ae1-a5b5-bfca202b829f" containerName="reloader" containerID="cri-o://b4637c37a19c42553fdba44eab27cdf16f2371202c7d1aa5006cea06065a1d37" gracePeriod=2 Oct 01 16:35:19 crc kubenswrapper[4869]: I1001 16:35:19.344906 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="metallb-system/frr-k8s-vrp9g" podUID="38150466-9f4a-4ae1-a5b5-bfca202b829f" containerName="frr-metrics" containerID="cri-o://d47376f0b0900e8d989fb702b9a922fb8d94fbadd197b208d10756adf7d9f781" gracePeriod=2 Oct 01 16:35:19 crc kubenswrapper[4869]: I1001 16:35:19.344943 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="metallb-system/frr-k8s-vrp9g" podUID="38150466-9f4a-4ae1-a5b5-bfca202b829f" containerName="kube-rbac-proxy-frr" containerID="cri-o://69b77abd8f39d740a581372658860145f35f34a8eacd6dccecd5de6817c73d1b" gracePeriod=2 Oct 01 16:35:19 crc kubenswrapper[4869]: I1001 16:35:19.344890 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="metallb-system/frr-k8s-vrp9g" podUID="38150466-9f4a-4ae1-a5b5-bfca202b829f" containerName="kube-rbac-proxy" containerID="cri-o://18fca2b1d4ce900b868746591ae6a63b53c11f8873e7f20c13e90a1ba65b66ff" gracePeriod=2 Oct 01 16:35:19 crc kubenswrapper[4869]: I1001 16:35:19.344810 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="metallb-system/frr-k8s-vrp9g" podUID="38150466-9f4a-4ae1-a5b5-bfca202b829f" containerName="controller" 
containerID="cri-o://7ea00b63fc78e883cd02ad429492496e6b43bbca90d812745be1b98ea1507b04" gracePeriod=2 Oct 01 16:35:19 crc kubenswrapper[4869]: I1001 16:35:19.344877 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="metallb-system/frr-k8s-vrp9g" podUID="38150466-9f4a-4ae1-a5b5-bfca202b829f" containerName="frr" containerID="cri-o://7183574049d68257b43b0d7565c6d99799de35132e86d9a4f83fcd75701bb77c" gracePeriod=2 Oct 01 16:35:19 crc kubenswrapper[4869]: I1001 16:35:19.360975 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["metallb-system/frr-k8s-vrp9g"] Oct 01 16:35:19 crc kubenswrapper[4869]: I1001 16:35:19.387911 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-64bf5d555-w6xl4"] Oct 01 16:35:19 crc kubenswrapper[4869]: E1001 16:35:19.388610 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="38150466-9f4a-4ae1-a5b5-bfca202b829f" containerName="kube-rbac-proxy" Oct 01 16:35:19 crc kubenswrapper[4869]: I1001 16:35:19.388632 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="38150466-9f4a-4ae1-a5b5-bfca202b829f" containerName="kube-rbac-proxy" Oct 01 16:35:19 crc kubenswrapper[4869]: E1001 16:35:19.388671 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="38150466-9f4a-4ae1-a5b5-bfca202b829f" containerName="cp-frr-files" Oct 01 16:35:19 crc kubenswrapper[4869]: I1001 16:35:19.388679 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="38150466-9f4a-4ae1-a5b5-bfca202b829f" containerName="cp-frr-files" Oct 01 16:35:19 crc kubenswrapper[4869]: E1001 16:35:19.388696 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="38150466-9f4a-4ae1-a5b5-bfca202b829f" containerName="reloader" Oct 01 16:35:19 crc kubenswrapper[4869]: I1001 16:35:19.388702 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="38150466-9f4a-4ae1-a5b5-bfca202b829f" containerName="reloader" Oct 01 16:35:19 crc kubenswrapper[4869]: E1001 16:35:19.388717 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="38150466-9f4a-4ae1-a5b5-bfca202b829f" containerName="cp-metrics" Oct 01 16:35:19 crc kubenswrapper[4869]: I1001 16:35:19.388724 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="38150466-9f4a-4ae1-a5b5-bfca202b829f" containerName="cp-metrics" Oct 01 16:35:19 crc kubenswrapper[4869]: E1001 16:35:19.388757 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="38150466-9f4a-4ae1-a5b5-bfca202b829f" containerName="cp-reloader" Oct 01 16:35:19 crc kubenswrapper[4869]: I1001 16:35:19.388764 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="38150466-9f4a-4ae1-a5b5-bfca202b829f" containerName="cp-reloader" Oct 01 16:35:19 crc kubenswrapper[4869]: E1001 16:35:19.388772 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="38150466-9f4a-4ae1-a5b5-bfca202b829f" containerName="frr" Oct 01 16:35:19 crc kubenswrapper[4869]: I1001 16:35:19.388779 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="38150466-9f4a-4ae1-a5b5-bfca202b829f" containerName="frr" Oct 01 16:35:19 crc kubenswrapper[4869]: E1001 16:35:19.388789 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="38150466-9f4a-4ae1-a5b5-bfca202b829f" containerName="controller" Oct 01 16:35:19 crc kubenswrapper[4869]: I1001 16:35:19.388796 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="38150466-9f4a-4ae1-a5b5-bfca202b829f" containerName="controller" Oct 01 16:35:19 crc kubenswrapper[4869]: E1001 16:35:19.388840 4869 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="38150466-9f4a-4ae1-a5b5-bfca202b829f" containerName="frr-metrics" Oct 01 16:35:19 crc kubenswrapper[4869]: I1001 16:35:19.388848 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="38150466-9f4a-4ae1-a5b5-bfca202b829f" containerName="frr-metrics" Oct 01 16:35:19 crc kubenswrapper[4869]: E1001 16:35:19.388859 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="38150466-9f4a-4ae1-a5b5-bfca202b829f" containerName="kube-rbac-proxy-frr" Oct 01 16:35:19 crc kubenswrapper[4869]: I1001 16:35:19.388867 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="38150466-9f4a-4ae1-a5b5-bfca202b829f" containerName="kube-rbac-proxy-frr" Oct 01 16:35:19 crc kubenswrapper[4869]: I1001 16:35:19.389148 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="38150466-9f4a-4ae1-a5b5-bfca202b829f" containerName="kube-rbac-proxy-frr" Oct 01 16:35:19 crc kubenswrapper[4869]: I1001 16:35:19.389176 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="38150466-9f4a-4ae1-a5b5-bfca202b829f" containerName="frr-metrics" Oct 01 16:35:19 crc kubenswrapper[4869]: I1001 16:35:19.389188 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="38150466-9f4a-4ae1-a5b5-bfca202b829f" containerName="reloader" Oct 01 16:35:19 crc kubenswrapper[4869]: I1001 16:35:19.389216 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="38150466-9f4a-4ae1-a5b5-bfca202b829f" containerName="kube-rbac-proxy" Oct 01 16:35:19 crc kubenswrapper[4869]: I1001 16:35:19.389231 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="38150466-9f4a-4ae1-a5b5-bfca202b829f" containerName="frr" Oct 01 16:35:19 crc kubenswrapper[4869]: I1001 16:35:19.389247 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="38150466-9f4a-4ae1-a5b5-bfca202b829f" containerName="controller" Oct 01 16:35:19 crc kubenswrapper[4869]: I1001 16:35:19.390278 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-w6xl4" Oct 01 16:35:19 crc kubenswrapper[4869]: I1001 16:35:19.400113 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-64bf5d555-w6xl4"] Oct 01 16:35:19 crc kubenswrapper[4869]: I1001 16:35:19.412857 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-8795b"] Oct 01 16:35:19 crc kubenswrapper[4869]: I1001 16:35:19.416864 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-8795b" Oct 01 16:35:19 crc kubenswrapper[4869]: I1001 16:35:19.487483 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["metallb-system/speaker-kntp6"] Oct 01 16:35:19 crc kubenswrapper[4869]: I1001 16:35:19.511712 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["metallb-system/speaker-kntp6"] Oct 01 16:35:19 crc kubenswrapper[4869]: I1001 16:35:19.512045 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="metallb-system/speaker-kntp6" podUID="74ce9afe-754c-4610-90f1-a9c42b2cd395" containerName="speaker" containerID="cri-o://48e854ae42ed5b5b19b9f5df29806b8cbdf94b2e1afe36b217cdc8ffa92eaa7e" gracePeriod=2 Oct 01 16:35:19 crc kubenswrapper[4869]: I1001 16:35:19.512297 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="metallb-system/speaker-kntp6" podUID="74ce9afe-754c-4610-90f1-a9c42b2cd395" containerName="kube-rbac-proxy" containerID="cri-o://97a0af7433489a0858c912899c72c07ca77fd506d778954fd6dd112e4f076228" gracePeriod=2 Oct 01 16:35:19 crc kubenswrapper[4869]: I1001 16:35:19.516514 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/e748d49d-c9ae-445f-87d7-311d7ef79b37-reloader\") pod \"frr-k8s-8795b\" (UID: \"e748d49d-c9ae-445f-87d7-311d7ef79b37\") " pod="metallb-system/frr-k8s-8795b" Oct 01 16:35:19 crc kubenswrapper[4869]: I1001 16:35:19.516575 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/e748d49d-c9ae-445f-87d7-311d7ef79b37-frr-startup\") pod \"frr-k8s-8795b\" (UID: \"e748d49d-c9ae-445f-87d7-311d7ef79b37\") " pod="metallb-system/frr-k8s-8795b" Oct 01 16:35:19 crc kubenswrapper[4869]: I1001 16:35:19.516625 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/517c59e1-4387-4182-8db2-374a1dd516e6-cert\") pod \"frr-k8s-webhook-server-64bf5d555-w6xl4\" (UID: \"517c59e1-4387-4182-8db2-374a1dd516e6\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-w6xl4" Oct 01 16:35:19 crc kubenswrapper[4869]: I1001 16:35:19.516711 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-znrrx\" (UniqueName: \"kubernetes.io/projected/e748d49d-c9ae-445f-87d7-311d7ef79b37-kube-api-access-znrrx\") pod \"frr-k8s-8795b\" (UID: \"e748d49d-c9ae-445f-87d7-311d7ef79b37\") " pod="metallb-system/frr-k8s-8795b" Oct 01 16:35:19 crc kubenswrapper[4869]: I1001 16:35:19.516752 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m5mgs\" (UniqueName: \"kubernetes.io/projected/517c59e1-4387-4182-8db2-374a1dd516e6-kube-api-access-m5mgs\") pod \"frr-k8s-webhook-server-64bf5d555-w6xl4\" (UID: \"517c59e1-4387-4182-8db2-374a1dd516e6\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-w6xl4" Oct 01 16:35:19 crc kubenswrapper[4869]: I1001 16:35:19.516881 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e748d49d-c9ae-445f-87d7-311d7ef79b37-metrics-certs\") pod \"frr-k8s-8795b\" (UID: \"e748d49d-c9ae-445f-87d7-311d7ef79b37\") " pod="metallb-system/frr-k8s-8795b" Oct 01 16:35:19 crc kubenswrapper[4869]: I1001 16:35:19.516947 4869 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/e748d49d-c9ae-445f-87d7-311d7ef79b37-frr-sockets\") pod \"frr-k8s-8795b\" (UID: \"e748d49d-c9ae-445f-87d7-311d7ef79b37\") " pod="metallb-system/frr-k8s-8795b" Oct 01 16:35:19 crc kubenswrapper[4869]: I1001 16:35:19.516974 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/e748d49d-c9ae-445f-87d7-311d7ef79b37-frr-conf\") pod \"frr-k8s-8795b\" (UID: \"e748d49d-c9ae-445f-87d7-311d7ef79b37\") " pod="metallb-system/frr-k8s-8795b" Oct 01 16:35:19 crc kubenswrapper[4869]: I1001 16:35:19.516997 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/e748d49d-c9ae-445f-87d7-311d7ef79b37-metrics\") pod \"frr-k8s-8795b\" (UID: \"e748d49d-c9ae-445f-87d7-311d7ef79b37\") " pod="metallb-system/frr-k8s-8795b" Oct 01 16:35:19 crc kubenswrapper[4869]: I1001 16:35:19.536867 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-6gmkw"] Oct 01 16:35:19 crc kubenswrapper[4869]: E1001 16:35:19.537700 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="74ce9afe-754c-4610-90f1-a9c42b2cd395" containerName="speaker" Oct 01 16:35:19 crc kubenswrapper[4869]: I1001 16:35:19.537718 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="74ce9afe-754c-4610-90f1-a9c42b2cd395" containerName="speaker" Oct 01 16:35:19 crc kubenswrapper[4869]: E1001 16:35:19.537745 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="74ce9afe-754c-4610-90f1-a9c42b2cd395" containerName="kube-rbac-proxy" Oct 01 16:35:19 crc kubenswrapper[4869]: I1001 16:35:19.537754 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="74ce9afe-754c-4610-90f1-a9c42b2cd395" containerName="kube-rbac-proxy" Oct 01 16:35:19 crc kubenswrapper[4869]: I1001 16:35:19.537984 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="74ce9afe-754c-4610-90f1-a9c42b2cd395" containerName="speaker" Oct 01 16:35:19 crc kubenswrapper[4869]: I1001 16:35:19.538014 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="74ce9afe-754c-4610-90f1-a9c42b2cd395" containerName="kube-rbac-proxy" Oct 01 16:35:19 crc kubenswrapper[4869]: I1001 16:35:19.539237 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/speaker-6gmkw" Oct 01 16:35:19 crc kubenswrapper[4869]: I1001 16:35:19.624725 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/e748d49d-c9ae-445f-87d7-311d7ef79b37-reloader\") pod \"frr-k8s-8795b\" (UID: \"e748d49d-c9ae-445f-87d7-311d7ef79b37\") " pod="metallb-system/frr-k8s-8795b" Oct 01 16:35:19 crc kubenswrapper[4869]: I1001 16:35:19.624774 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/e748d49d-c9ae-445f-87d7-311d7ef79b37-frr-startup\") pod \"frr-k8s-8795b\" (UID: \"e748d49d-c9ae-445f-87d7-311d7ef79b37\") " pod="metallb-system/frr-k8s-8795b" Oct 01 16:35:19 crc kubenswrapper[4869]: I1001 16:35:19.624812 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/86377387-9ca6-4577-a44d-125364686f83-metallb-excludel2\") pod \"speaker-6gmkw\" (UID: \"86377387-9ca6-4577-a44d-125364686f83\") " pod="metallb-system/speaker-6gmkw" Oct 01 16:35:19 crc kubenswrapper[4869]: I1001 16:35:19.624836 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/517c59e1-4387-4182-8db2-374a1dd516e6-cert\") pod \"frr-k8s-webhook-server-64bf5d555-w6xl4\" (UID: \"517c59e1-4387-4182-8db2-374a1dd516e6\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-w6xl4" Oct 01 16:35:19 crc kubenswrapper[4869]: I1001 16:35:19.624878 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dmtcl\" (UniqueName: \"kubernetes.io/projected/86377387-9ca6-4577-a44d-125364686f83-kube-api-access-dmtcl\") pod \"speaker-6gmkw\" (UID: \"86377387-9ca6-4577-a44d-125364686f83\") " pod="metallb-system/speaker-6gmkw" Oct 01 16:35:19 crc kubenswrapper[4869]: I1001 16:35:19.624907 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-znrrx\" (UniqueName: \"kubernetes.io/projected/e748d49d-c9ae-445f-87d7-311d7ef79b37-kube-api-access-znrrx\") pod \"frr-k8s-8795b\" (UID: \"e748d49d-c9ae-445f-87d7-311d7ef79b37\") " pod="metallb-system/frr-k8s-8795b" Oct 01 16:35:19 crc kubenswrapper[4869]: I1001 16:35:19.624929 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m5mgs\" (UniqueName: \"kubernetes.io/projected/517c59e1-4387-4182-8db2-374a1dd516e6-kube-api-access-m5mgs\") pod \"frr-k8s-webhook-server-64bf5d555-w6xl4\" (UID: \"517c59e1-4387-4182-8db2-374a1dd516e6\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-w6xl4" Oct 01 16:35:19 crc kubenswrapper[4869]: I1001 16:35:19.624993 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/86377387-9ca6-4577-a44d-125364686f83-memberlist\") pod \"speaker-6gmkw\" (UID: \"86377387-9ca6-4577-a44d-125364686f83\") " pod="metallb-system/speaker-6gmkw" Oct 01 16:35:19 crc kubenswrapper[4869]: I1001 16:35:19.625035 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e748d49d-c9ae-445f-87d7-311d7ef79b37-metrics-certs\") pod \"frr-k8s-8795b\" (UID: \"e748d49d-c9ae-445f-87d7-311d7ef79b37\") " pod="metallb-system/frr-k8s-8795b" Oct 01 16:35:19 crc kubenswrapper[4869]: I1001 16:35:19.625057 4869 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/86377387-9ca6-4577-a44d-125364686f83-metrics-certs\") pod \"speaker-6gmkw\" (UID: \"86377387-9ca6-4577-a44d-125364686f83\") " pod="metallb-system/speaker-6gmkw" Oct 01 16:35:19 crc kubenswrapper[4869]: I1001 16:35:19.625076 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/e748d49d-c9ae-445f-87d7-311d7ef79b37-frr-sockets\") pod \"frr-k8s-8795b\" (UID: \"e748d49d-c9ae-445f-87d7-311d7ef79b37\") " pod="metallb-system/frr-k8s-8795b" Oct 01 16:35:19 crc kubenswrapper[4869]: I1001 16:35:19.625094 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/e748d49d-c9ae-445f-87d7-311d7ef79b37-frr-conf\") pod \"frr-k8s-8795b\" (UID: \"e748d49d-c9ae-445f-87d7-311d7ef79b37\") " pod="metallb-system/frr-k8s-8795b" Oct 01 16:35:19 crc kubenswrapper[4869]: I1001 16:35:19.625109 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/e748d49d-c9ae-445f-87d7-311d7ef79b37-metrics\") pod \"frr-k8s-8795b\" (UID: \"e748d49d-c9ae-445f-87d7-311d7ef79b37\") " pod="metallb-system/frr-k8s-8795b" Oct 01 16:35:19 crc kubenswrapper[4869]: I1001 16:35:19.639476 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/e748d49d-c9ae-445f-87d7-311d7ef79b37-metrics\") pod \"frr-k8s-8795b\" (UID: \"e748d49d-c9ae-445f-87d7-311d7ef79b37\") " pod="metallb-system/frr-k8s-8795b" Oct 01 16:35:19 crc kubenswrapper[4869]: I1001 16:35:19.640486 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/e748d49d-c9ae-445f-87d7-311d7ef79b37-reloader\") pod \"frr-k8s-8795b\" (UID: \"e748d49d-c9ae-445f-87d7-311d7ef79b37\") " pod="metallb-system/frr-k8s-8795b" Oct 01 16:35:19 crc kubenswrapper[4869]: I1001 16:35:19.640803 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/e748d49d-c9ae-445f-87d7-311d7ef79b37-frr-startup\") pod \"frr-k8s-8795b\" (UID: \"e748d49d-c9ae-445f-87d7-311d7ef79b37\") " pod="metallb-system/frr-k8s-8795b" Oct 01 16:35:19 crc kubenswrapper[4869]: I1001 16:35:19.655690 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/517c59e1-4387-4182-8db2-374a1dd516e6-cert\") pod \"frr-k8s-webhook-server-64bf5d555-w6xl4\" (UID: \"517c59e1-4387-4182-8db2-374a1dd516e6\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-w6xl4" Oct 01 16:35:19 crc kubenswrapper[4869]: I1001 16:35:19.662567 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/e748d49d-c9ae-445f-87d7-311d7ef79b37-frr-conf\") pod \"frr-k8s-8795b\" (UID: \"e748d49d-c9ae-445f-87d7-311d7ef79b37\") " pod="metallb-system/frr-k8s-8795b" Oct 01 16:35:19 crc kubenswrapper[4869]: I1001 16:35:19.666610 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/e748d49d-c9ae-445f-87d7-311d7ef79b37-frr-sockets\") pod \"frr-k8s-8795b\" (UID: \"e748d49d-c9ae-445f-87d7-311d7ef79b37\") " pod="metallb-system/frr-k8s-8795b" Oct 01 16:35:19 crc kubenswrapper[4869]: I1001 16:35:19.668572 4869 
kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-68d546b9d8-l2jbk"] Oct 01 16:35:19 crc kubenswrapper[4869]: I1001 16:35:19.671214 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-znrrx\" (UniqueName: \"kubernetes.io/projected/e748d49d-c9ae-445f-87d7-311d7ef79b37-kube-api-access-znrrx\") pod \"frr-k8s-8795b\" (UID: \"e748d49d-c9ae-445f-87d7-311d7ef79b37\") " pod="metallb-system/frr-k8s-8795b" Oct 01 16:35:19 crc kubenswrapper[4869]: I1001 16:35:19.671719 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e748d49d-c9ae-445f-87d7-311d7ef79b37-metrics-certs\") pod \"frr-k8s-8795b\" (UID: \"e748d49d-c9ae-445f-87d7-311d7ef79b37\") " pod="metallb-system/frr-k8s-8795b" Oct 01 16:35:19 crc kubenswrapper[4869]: I1001 16:35:19.672318 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m5mgs\" (UniqueName: \"kubernetes.io/projected/517c59e1-4387-4182-8db2-374a1dd516e6-kube-api-access-m5mgs\") pod \"frr-k8s-webhook-server-64bf5d555-w6xl4\" (UID: \"517c59e1-4387-4182-8db2-374a1dd516e6\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-w6xl4" Oct 01 16:35:19 crc kubenswrapper[4869]: I1001 16:35:19.677445 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-68d546b9d8-l2jbk"] Oct 01 16:35:19 crc kubenswrapper[4869]: I1001 16:35:19.677546 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-68d546b9d8-l2jbk" Oct 01 16:35:19 crc kubenswrapper[4869]: I1001 16:35:19.724008 4869 generic.go:334] "Generic (PLEG): container finished" podID="38150466-9f4a-4ae1-a5b5-bfca202b829f" containerID="69b77abd8f39d740a581372658860145f35f34a8eacd6dccecd5de6817c73d1b" exitCode=0 Oct 01 16:35:19 crc kubenswrapper[4869]: I1001 16:35:19.724254 4869 generic.go:334] "Generic (PLEG): container finished" podID="38150466-9f4a-4ae1-a5b5-bfca202b829f" containerID="18fca2b1d4ce900b868746591ae6a63b53c11f8873e7f20c13e90a1ba65b66ff" exitCode=0 Oct 01 16:35:19 crc kubenswrapper[4869]: I1001 16:35:19.724349 4869 generic.go:334] "Generic (PLEG): container finished" podID="38150466-9f4a-4ae1-a5b5-bfca202b829f" containerID="d47376f0b0900e8d989fb702b9a922fb8d94fbadd197b208d10756adf7d9f781" exitCode=143 Oct 01 16:35:19 crc kubenswrapper[4869]: I1001 16:35:19.724408 4869 generic.go:334] "Generic (PLEG): container finished" podID="38150466-9f4a-4ae1-a5b5-bfca202b829f" containerID="b4637c37a19c42553fdba44eab27cdf16f2371202c7d1aa5006cea06065a1d37" exitCode=0 Oct 01 16:35:19 crc kubenswrapper[4869]: I1001 16:35:19.724466 4869 generic.go:334] "Generic (PLEG): container finished" podID="38150466-9f4a-4ae1-a5b5-bfca202b829f" containerID="7183574049d68257b43b0d7565c6d99799de35132e86d9a4f83fcd75701bb77c" exitCode=143 Oct 01 16:35:19 crc kubenswrapper[4869]: I1001 16:35:19.728609 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/86377387-9ca6-4577-a44d-125364686f83-memberlist\") pod \"speaker-6gmkw\" (UID: \"86377387-9ca6-4577-a44d-125364686f83\") " pod="metallb-system/speaker-6gmkw" Oct 01 16:35:19 crc kubenswrapper[4869]: I1001 16:35:19.728687 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/86377387-9ca6-4577-a44d-125364686f83-metrics-certs\") pod \"speaker-6gmkw\" (UID: \"86377387-9ca6-4577-a44d-125364686f83\") " 
pod="metallb-system/speaker-6gmkw" Oct 01 16:35:19 crc kubenswrapper[4869]: I1001 16:35:19.728784 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/86377387-9ca6-4577-a44d-125364686f83-metallb-excludel2\") pod \"speaker-6gmkw\" (UID: \"86377387-9ca6-4577-a44d-125364686f83\") " pod="metallb-system/speaker-6gmkw" Oct 01 16:35:19 crc kubenswrapper[4869]: I1001 16:35:19.728865 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dmtcl\" (UniqueName: \"kubernetes.io/projected/86377387-9ca6-4577-a44d-125364686f83-kube-api-access-dmtcl\") pod \"speaker-6gmkw\" (UID: \"86377387-9ca6-4577-a44d-125364686f83\") " pod="metallb-system/speaker-6gmkw" Oct 01 16:35:19 crc kubenswrapper[4869]: I1001 16:35:19.737425 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/86377387-9ca6-4577-a44d-125364686f83-metallb-excludel2\") pod \"speaker-6gmkw\" (UID: \"86377387-9ca6-4577-a44d-125364686f83\") " pod="metallb-system/speaker-6gmkw" Oct 01 16:35:19 crc kubenswrapper[4869]: I1001 16:35:19.739451 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/86377387-9ca6-4577-a44d-125364686f83-memberlist\") pod \"speaker-6gmkw\" (UID: \"86377387-9ca6-4577-a44d-125364686f83\") " pod="metallb-system/speaker-6gmkw" Oct 01 16:35:19 crc kubenswrapper[4869]: I1001 16:35:19.754306 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/86377387-9ca6-4577-a44d-125364686f83-metrics-certs\") pod \"speaker-6gmkw\" (UID: \"86377387-9ca6-4577-a44d-125364686f83\") " pod="metallb-system/speaker-6gmkw" Oct 01 16:35:19 crc kubenswrapper[4869]: I1001 16:35:19.772491 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-8795b" Oct 01 16:35:19 crc kubenswrapper[4869]: I1001 16:35:19.782129 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dmtcl\" (UniqueName: \"kubernetes.io/projected/86377387-9ca6-4577-a44d-125364686f83-kube-api-access-dmtcl\") pod \"speaker-6gmkw\" (UID: \"86377387-9ca6-4577-a44d-125364686f83\") " pod="metallb-system/speaker-6gmkw" Oct 01 16:35:19 crc kubenswrapper[4869]: I1001 16:35:19.799351 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/speaker-6gmkw" Oct 01 16:35:19 crc kubenswrapper[4869]: I1001 16:35:19.830609 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b874t\" (UniqueName: \"kubernetes.io/projected/18d835b6-473f-4ff7-9a54-bd4f280896c2-kube-api-access-b874t\") pod \"controller-68d546b9d8-l2jbk\" (UID: \"18d835b6-473f-4ff7-9a54-bd4f280896c2\") " pod="metallb-system/controller-68d546b9d8-l2jbk" Oct 01 16:35:19 crc kubenswrapper[4869]: I1001 16:35:19.830692 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/18d835b6-473f-4ff7-9a54-bd4f280896c2-cert\") pod \"controller-68d546b9d8-l2jbk\" (UID: \"18d835b6-473f-4ff7-9a54-bd4f280896c2\") " pod="metallb-system/controller-68d546b9d8-l2jbk" Oct 01 16:35:19 crc kubenswrapper[4869]: I1001 16:35:19.830850 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/18d835b6-473f-4ff7-9a54-bd4f280896c2-metrics-certs\") pod \"controller-68d546b9d8-l2jbk\" (UID: \"18d835b6-473f-4ff7-9a54-bd4f280896c2\") " pod="metallb-system/controller-68d546b9d8-l2jbk" Oct 01 16:35:19 crc kubenswrapper[4869]: I1001 16:35:19.859478 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-vrp9g" Oct 01 16:35:19 crc kubenswrapper[4869]: I1001 16:35:19.932127 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b874t\" (UniqueName: \"kubernetes.io/projected/18d835b6-473f-4ff7-9a54-bd4f280896c2-kube-api-access-b874t\") pod \"controller-68d546b9d8-l2jbk\" (UID: \"18d835b6-473f-4ff7-9a54-bd4f280896c2\") " pod="metallb-system/controller-68d546b9d8-l2jbk" Oct 01 16:35:19 crc kubenswrapper[4869]: I1001 16:35:19.932586 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-w6xl4" Oct 01 16:35:19 crc kubenswrapper[4869]: I1001 16:35:19.932799 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/18d835b6-473f-4ff7-9a54-bd4f280896c2-cert\") pod \"controller-68d546b9d8-l2jbk\" (UID: \"18d835b6-473f-4ff7-9a54-bd4f280896c2\") " pod="metallb-system/controller-68d546b9d8-l2jbk" Oct 01 16:35:19 crc kubenswrapper[4869]: I1001 16:35:19.933123 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/18d835b6-473f-4ff7-9a54-bd4f280896c2-metrics-certs\") pod \"controller-68d546b9d8-l2jbk\" (UID: \"18d835b6-473f-4ff7-9a54-bd4f280896c2\") " pod="metallb-system/controller-68d546b9d8-l2jbk" Oct 01 16:35:19 crc kubenswrapper[4869]: I1001 16:35:19.936523 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/18d835b6-473f-4ff7-9a54-bd4f280896c2-cert\") pod \"controller-68d546b9d8-l2jbk\" (UID: \"18d835b6-473f-4ff7-9a54-bd4f280896c2\") " pod="metallb-system/controller-68d546b9d8-l2jbk" Oct 01 16:35:19 crc kubenswrapper[4869]: I1001 16:35:19.936721 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/18d835b6-473f-4ff7-9a54-bd4f280896c2-metrics-certs\") pod \"controller-68d546b9d8-l2jbk\" (UID: \"18d835b6-473f-4ff7-9a54-bd4f280896c2\") " pod="metallb-system/controller-68d546b9d8-l2jbk" Oct 01 16:35:19 crc kubenswrapper[4869]: E1001 16:35:19.948656 4869 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod74ce9afe_754c_4610_90f1_a9c42b2cd395.slice/crio-conmon-97a0af7433489a0858c912899c72c07ca77fd506d778954fd6dd112e4f076228.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod38150466_9f4a_4ae1_a5b5_bfca202b829f.slice/crio-conmon-18fca2b1d4ce900b868746591ae6a63b53c11f8873e7f20c13e90a1ba65b66ff.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod74ce9afe_754c_4610_90f1_a9c42b2cd395.slice/crio-97a0af7433489a0858c912899c72c07ca77fd506d778954fd6dd112e4f076228.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod38150466_9f4a_4ae1_a5b5_bfca202b829f.slice/crio-7ea00b63fc78e883cd02ad429492496e6b43bbca90d812745be1b98ea1507b04.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod38150466_9f4a_4ae1_a5b5_bfca202b829f.slice/crio-7183574049d68257b43b0d7565c6d99799de35132e86d9a4f83fcd75701bb77c.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod38150466_9f4a_4ae1_a5b5_bfca202b829f.slice/crio-conmon-7ea00b63fc78e883cd02ad429492496e6b43bbca90d812745be1b98ea1507b04.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod38150466_9f4a_4ae1_a5b5_bfca202b829f.slice/crio-conmon-e859d261b906f2e4cb0aa85e666e6facffa874d1c8bb264bcbe50e6895c89dc2.scope\": RecentStats: unable to find data in memory cache], 
[\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod74ce9afe_754c_4610_90f1_a9c42b2cd395.slice/crio-conmon-48e854ae42ed5b5b19b9f5df29806b8cbdf94b2e1afe36b217cdc8ffa92eaa7e.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod74ce9afe_754c_4610_90f1_a9c42b2cd395.slice/crio-48e854ae42ed5b5b19b9f5df29806b8cbdf94b2e1afe36b217cdc8ffa92eaa7e.scope\": RecentStats: unable to find data in memory cache]" Oct 01 16:35:19 crc kubenswrapper[4869]: I1001 16:35:19.949977 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b874t\" (UniqueName: \"kubernetes.io/projected/18d835b6-473f-4ff7-9a54-bd4f280896c2-kube-api-access-b874t\") pod \"controller-68d546b9d8-l2jbk\" (UID: \"18d835b6-473f-4ff7-9a54-bd4f280896c2\") " pod="metallb-system/controller-68d546b9d8-l2jbk" Oct 01 16:35:20 crc kubenswrapper[4869]: I1001 16:35:20.034114 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/38150466-9f4a-4ae1-a5b5-bfca202b829f-metrics-certs\") pod \"38150466-9f4a-4ae1-a5b5-bfca202b829f\" (UID: \"38150466-9f4a-4ae1-a5b5-bfca202b829f\") " Oct 01 16:35:20 crc kubenswrapper[4869]: I1001 16:35:20.034655 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/38150466-9f4a-4ae1-a5b5-bfca202b829f-metrics\") pod \"38150466-9f4a-4ae1-a5b5-bfca202b829f\" (UID: \"38150466-9f4a-4ae1-a5b5-bfca202b829f\") " Oct 01 16:35:20 crc kubenswrapper[4869]: I1001 16:35:20.034770 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/38150466-9f4a-4ae1-a5b5-bfca202b829f-frr-sockets\") pod \"38150466-9f4a-4ae1-a5b5-bfca202b829f\" (UID: \"38150466-9f4a-4ae1-a5b5-bfca202b829f\") " Oct 01 16:35:20 crc kubenswrapper[4869]: I1001 16:35:20.035121 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/38150466-9f4a-4ae1-a5b5-bfca202b829f-frr-startup\") pod \"38150466-9f4a-4ae1-a5b5-bfca202b829f\" (UID: \"38150466-9f4a-4ae1-a5b5-bfca202b829f\") " Oct 01 16:35:20 crc kubenswrapper[4869]: I1001 16:35:20.035219 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6w4n4\" (UniqueName: \"kubernetes.io/projected/38150466-9f4a-4ae1-a5b5-bfca202b829f-kube-api-access-6w4n4\") pod \"38150466-9f4a-4ae1-a5b5-bfca202b829f\" (UID: \"38150466-9f4a-4ae1-a5b5-bfca202b829f\") " Oct 01 16:35:20 crc kubenswrapper[4869]: I1001 16:35:20.035320 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/38150466-9f4a-4ae1-a5b5-bfca202b829f-reloader\") pod \"38150466-9f4a-4ae1-a5b5-bfca202b829f\" (UID: \"38150466-9f4a-4ae1-a5b5-bfca202b829f\") " Oct 01 16:35:20 crc kubenswrapper[4869]: I1001 16:35:20.035421 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/38150466-9f4a-4ae1-a5b5-bfca202b829f-frr-conf\") pod \"38150466-9f4a-4ae1-a5b5-bfca202b829f\" (UID: \"38150466-9f4a-4ae1-a5b5-bfca202b829f\") " Oct 01 16:35:20 crc kubenswrapper[4869]: I1001 16:35:20.037655 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/38150466-9f4a-4ae1-a5b5-bfca202b829f-frr-startup" 
(OuterVolumeSpecName: "frr-startup") pod "38150466-9f4a-4ae1-a5b5-bfca202b829f" (UID: "38150466-9f4a-4ae1-a5b5-bfca202b829f"). InnerVolumeSpecName "frr-startup". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 16:35:20 crc kubenswrapper[4869]: I1001 16:35:20.038196 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/38150466-9f4a-4ae1-a5b5-bfca202b829f-frr-sockets" (OuterVolumeSpecName: "frr-sockets") pod "38150466-9f4a-4ae1-a5b5-bfca202b829f" (UID: "38150466-9f4a-4ae1-a5b5-bfca202b829f"). InnerVolumeSpecName "frr-sockets". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 16:35:20 crc kubenswrapper[4869]: I1001 16:35:20.039302 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/38150466-9f4a-4ae1-a5b5-bfca202b829f-metrics" (OuterVolumeSpecName: "metrics") pod "38150466-9f4a-4ae1-a5b5-bfca202b829f" (UID: "38150466-9f4a-4ae1-a5b5-bfca202b829f"). InnerVolumeSpecName "metrics". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 16:35:20 crc kubenswrapper[4869]: I1001 16:35:20.042476 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/38150466-9f4a-4ae1-a5b5-bfca202b829f-kube-api-access-6w4n4" (OuterVolumeSpecName: "kube-api-access-6w4n4") pod "38150466-9f4a-4ae1-a5b5-bfca202b829f" (UID: "38150466-9f4a-4ae1-a5b5-bfca202b829f"). InnerVolumeSpecName "kube-api-access-6w4n4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 16:35:20 crc kubenswrapper[4869]: I1001 16:35:20.043664 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/38150466-9f4a-4ae1-a5b5-bfca202b829f-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "38150466-9f4a-4ae1-a5b5-bfca202b829f" (UID: "38150466-9f4a-4ae1-a5b5-bfca202b829f"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 16:35:20 crc kubenswrapper[4869]: I1001 16:35:20.050153 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/38150466-9f4a-4ae1-a5b5-bfca202b829f-reloader" (OuterVolumeSpecName: "reloader") pod "38150466-9f4a-4ae1-a5b5-bfca202b829f" (UID: "38150466-9f4a-4ae1-a5b5-bfca202b829f"). InnerVolumeSpecName "reloader". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 16:35:20 crc kubenswrapper[4869]: I1001 16:35:20.052102 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/38150466-9f4a-4ae1-a5b5-bfca202b829f-frr-conf" (OuterVolumeSpecName: "frr-conf") pod "38150466-9f4a-4ae1-a5b5-bfca202b829f" (UID: "38150466-9f4a-4ae1-a5b5-bfca202b829f"). InnerVolumeSpecName "frr-conf". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 16:35:20 crc kubenswrapper[4869]: I1001 16:35:20.138351 4869 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/38150466-9f4a-4ae1-a5b5-bfca202b829f-metrics-certs\") on node \"crc\" DevicePath \"\"" Oct 01 16:35:20 crc kubenswrapper[4869]: I1001 16:35:20.138787 4869 reconciler_common.go:293] "Volume detached for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/38150466-9f4a-4ae1-a5b5-bfca202b829f-metrics\") on node \"crc\" DevicePath \"\"" Oct 01 16:35:20 crc kubenswrapper[4869]: I1001 16:35:20.138801 4869 reconciler_common.go:293] "Volume detached for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/38150466-9f4a-4ae1-a5b5-bfca202b829f-frr-sockets\") on node \"crc\" DevicePath \"\"" Oct 01 16:35:20 crc kubenswrapper[4869]: I1001 16:35:20.138812 4869 reconciler_common.go:293] "Volume detached for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/38150466-9f4a-4ae1-a5b5-bfca202b829f-frr-startup\") on node \"crc\" DevicePath \"\"" Oct 01 16:35:20 crc kubenswrapper[4869]: I1001 16:35:20.138825 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6w4n4\" (UniqueName: \"kubernetes.io/projected/38150466-9f4a-4ae1-a5b5-bfca202b829f-kube-api-access-6w4n4\") on node \"crc\" DevicePath \"\"" Oct 01 16:35:20 crc kubenswrapper[4869]: I1001 16:35:20.138836 4869 reconciler_common.go:293] "Volume detached for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/38150466-9f4a-4ae1-a5b5-bfca202b829f-reloader\") on node \"crc\" DevicePath \"\"" Oct 01 16:35:20 crc kubenswrapper[4869]: I1001 16:35:20.138848 4869 reconciler_common.go:293] "Volume detached for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/38150466-9f4a-4ae1-a5b5-bfca202b829f-frr-conf\") on node \"crc\" DevicePath \"\"" Oct 01 16:35:20 crc kubenswrapper[4869]: I1001 16:35:20.145680 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-68d546b9d8-l2jbk" Oct 01 16:35:20 crc kubenswrapper[4869]: I1001 16:35:20.343871 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/speaker-kntp6" Oct 01 16:35:20 crc kubenswrapper[4869]: I1001 16:35:20.435880 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-64bf5d555-w6xl4"] Oct 01 16:35:20 crc kubenswrapper[4869]: I1001 16:35:20.447350 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/74ce9afe-754c-4610-90f1-a9c42b2cd395-metallb-excludel2\") pod \"74ce9afe-754c-4610-90f1-a9c42b2cd395\" (UID: \"74ce9afe-754c-4610-90f1-a9c42b2cd395\") " Oct 01 16:35:20 crc kubenswrapper[4869]: I1001 16:35:20.447415 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/74ce9afe-754c-4610-90f1-a9c42b2cd395-metrics-certs\") pod \"74ce9afe-754c-4610-90f1-a9c42b2cd395\" (UID: \"74ce9afe-754c-4610-90f1-a9c42b2cd395\") " Oct 01 16:35:20 crc kubenswrapper[4869]: I1001 16:35:20.447439 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/74ce9afe-754c-4610-90f1-a9c42b2cd395-memberlist\") pod \"74ce9afe-754c-4610-90f1-a9c42b2cd395\" (UID: \"74ce9afe-754c-4610-90f1-a9c42b2cd395\") " Oct 01 16:35:20 crc kubenswrapper[4869]: I1001 16:35:20.447595 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lxmdv\" (UniqueName: \"kubernetes.io/projected/74ce9afe-754c-4610-90f1-a9c42b2cd395-kube-api-access-lxmdv\") pod \"74ce9afe-754c-4610-90f1-a9c42b2cd395\" (UID: \"74ce9afe-754c-4610-90f1-a9c42b2cd395\") " Oct 01 16:35:20 crc kubenswrapper[4869]: I1001 16:35:20.448764 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/74ce9afe-754c-4610-90f1-a9c42b2cd395-metallb-excludel2" (OuterVolumeSpecName: "metallb-excludel2") pod "74ce9afe-754c-4610-90f1-a9c42b2cd395" (UID: "74ce9afe-754c-4610-90f1-a9c42b2cd395"). InnerVolumeSpecName "metallb-excludel2". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 16:35:20 crc kubenswrapper[4869]: I1001 16:35:20.456527 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/74ce9afe-754c-4610-90f1-a9c42b2cd395-kube-api-access-lxmdv" (OuterVolumeSpecName: "kube-api-access-lxmdv") pod "74ce9afe-754c-4610-90f1-a9c42b2cd395" (UID: "74ce9afe-754c-4610-90f1-a9c42b2cd395"). InnerVolumeSpecName "kube-api-access-lxmdv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 16:35:20 crc kubenswrapper[4869]: I1001 16:35:20.457519 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/74ce9afe-754c-4610-90f1-a9c42b2cd395-memberlist" (OuterVolumeSpecName: "memberlist") pod "74ce9afe-754c-4610-90f1-a9c42b2cd395" (UID: "74ce9afe-754c-4610-90f1-a9c42b2cd395"). InnerVolumeSpecName "memberlist". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 16:35:20 crc kubenswrapper[4869]: I1001 16:35:20.459373 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/74ce9afe-754c-4610-90f1-a9c42b2cd395-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "74ce9afe-754c-4610-90f1-a9c42b2cd395" (UID: "74ce9afe-754c-4610-90f1-a9c42b2cd395"). InnerVolumeSpecName "metrics-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 16:35:20 crc kubenswrapper[4869]: I1001 16:35:20.549916 4869 reconciler_common.go:293] "Volume detached for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/74ce9afe-754c-4610-90f1-a9c42b2cd395-metallb-excludel2\") on node \"crc\" DevicePath \"\"" Oct 01 16:35:20 crc kubenswrapper[4869]: I1001 16:35:20.549973 4869 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/74ce9afe-754c-4610-90f1-a9c42b2cd395-metrics-certs\") on node \"crc\" DevicePath \"\"" Oct 01 16:35:20 crc kubenswrapper[4869]: I1001 16:35:20.550003 4869 reconciler_common.go:293] "Volume detached for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/74ce9afe-754c-4610-90f1-a9c42b2cd395-memberlist\") on node \"crc\" DevicePath \"\"" Oct 01 16:35:20 crc kubenswrapper[4869]: I1001 16:35:20.550013 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lxmdv\" (UniqueName: \"kubernetes.io/projected/74ce9afe-754c-4610-90f1-a9c42b2cd395-kube-api-access-lxmdv\") on node \"crc\" DevicePath \"\"" Oct 01 16:35:20 crc kubenswrapper[4869]: I1001 16:35:20.675104 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-68d546b9d8-l2jbk"] Oct 01 16:35:20 crc kubenswrapper[4869]: W1001 16:35:20.678120 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod18d835b6_473f_4ff7_9a54_bd4f280896c2.slice/crio-9329245b00c3174b441d9643409ad29d54ecaaf1d8e6d8f24cd1c44259b830d0 WatchSource:0}: Error finding container 9329245b00c3174b441d9643409ad29d54ecaaf1d8e6d8f24cd1c44259b830d0: Status 404 returned error can't find the container with id 9329245b00c3174b441d9643409ad29d54ecaaf1d8e6d8f24cd1c44259b830d0 Oct 01 16:35:20 crc kubenswrapper[4869]: I1001 16:35:20.742685 4869 generic.go:334] "Generic (PLEG): container finished" podID="74ce9afe-754c-4610-90f1-a9c42b2cd395" containerID="97a0af7433489a0858c912899c72c07ca77fd506d778954fd6dd112e4f076228" exitCode=0 Oct 01 16:35:20 crc kubenswrapper[4869]: I1001 16:35:20.742722 4869 generic.go:334] "Generic (PLEG): container finished" podID="74ce9afe-754c-4610-90f1-a9c42b2cd395" containerID="48e854ae42ed5b5b19b9f5df29806b8cbdf94b2e1afe36b217cdc8ffa92eaa7e" exitCode=0 Oct 01 16:35:20 crc kubenswrapper[4869]: I1001 16:35:20.742771 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-kntp6" Oct 01 16:35:20 crc kubenswrapper[4869]: I1001 16:35:20.742788 4869 scope.go:117] "RemoveContainer" containerID="97a0af7433489a0858c912899c72c07ca77fd506d778954fd6dd112e4f076228" Oct 01 16:35:20 crc kubenswrapper[4869]: I1001 16:35:20.751636 4869 generic.go:334] "Generic (PLEG): container finished" podID="38150466-9f4a-4ae1-a5b5-bfca202b829f" containerID="7ea00b63fc78e883cd02ad429492496e6b43bbca90d812745be1b98ea1507b04" exitCode=0 Oct 01 16:35:20 crc kubenswrapper[4869]: I1001 16:35:20.751788 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-vrp9g" Oct 01 16:35:20 crc kubenswrapper[4869]: I1001 16:35:20.756485 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-68d546b9d8-l2jbk" event={"ID":"18d835b6-473f-4ff7-9a54-bd4f280896c2","Type":"ContainerStarted","Data":"9329245b00c3174b441d9643409ad29d54ecaaf1d8e6d8f24cd1c44259b830d0"} Oct 01 16:35:20 crc kubenswrapper[4869]: I1001 16:35:20.761722 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-8795b" event={"ID":"e748d49d-c9ae-445f-87d7-311d7ef79b37","Type":"ContainerStarted","Data":"59fc750faeeb8e0f2e6e086ec7e7d5b9ec8ec9168d38c711590cf7a6b943ec6a"} Oct 01 16:35:20 crc kubenswrapper[4869]: I1001 16:35:20.766299 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-6gmkw" event={"ID":"86377387-9ca6-4577-a44d-125364686f83","Type":"ContainerStarted","Data":"84cf67be47ec5998c9dc03d5f9bd2f7820c08aa3056589858c46548ab737f3bc"} Oct 01 16:35:20 crc kubenswrapper[4869]: I1001 16:35:20.766341 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-6gmkw" event={"ID":"86377387-9ca6-4577-a44d-125364686f83","Type":"ContainerStarted","Data":"5f8200717d9e8fd5956849990555aa929f06c53a7a4fe30e2f6e4880afe6581a"} Oct 01 16:35:20 crc kubenswrapper[4869]: I1001 16:35:20.766352 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-6gmkw" event={"ID":"86377387-9ca6-4577-a44d-125364686f83","Type":"ContainerStarted","Data":"40f18a3388edbd19d453cfcaf468ae8cee35cb133ff7a00653209a6024435939"} Oct 01 16:35:20 crc kubenswrapper[4869]: I1001 16:35:20.766683 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-6gmkw" Oct 01 16:35:20 crc kubenswrapper[4869]: I1001 16:35:20.774246 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-w6xl4" event={"ID":"517c59e1-4387-4182-8db2-374a1dd516e6","Type":"ContainerStarted","Data":"7ff14f468ee8faf51b48e81a663b14b17086ac3a011489e5e3408579cd6f8353"} Oct 01 16:35:20 crc kubenswrapper[4869]: I1001 16:35:20.787232 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-6gmkw" podStartSLOduration=1.7872103 podStartE2EDuration="1.7872103s" podCreationTimestamp="2025-10-01 16:35:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 16:35:20.787017545 +0000 UTC m=+5429.933860661" watchObservedRunningTime="2025-10-01 16:35:20.7872103 +0000 UTC m=+5429.934053416" Oct 01 16:35:20 crc kubenswrapper[4869]: I1001 16:35:20.795796 4869 scope.go:117] "RemoveContainer" containerID="48e854ae42ed5b5b19b9f5df29806b8cbdf94b2e1afe36b217cdc8ffa92eaa7e" Oct 01 16:35:20 crc kubenswrapper[4869]: I1001 16:35:20.824540 4869 scope.go:117] "RemoveContainer" containerID="97a0af7433489a0858c912899c72c07ca77fd506d778954fd6dd112e4f076228" Oct 01 16:35:20 crc kubenswrapper[4869]: E1001 16:35:20.825163 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"97a0af7433489a0858c912899c72c07ca77fd506d778954fd6dd112e4f076228\": container with ID starting with 97a0af7433489a0858c912899c72c07ca77fd506d778954fd6dd112e4f076228 not found: ID does not exist" containerID="97a0af7433489a0858c912899c72c07ca77fd506d778954fd6dd112e4f076228" Oct 01 16:35:20 crc kubenswrapper[4869]: I1001 16:35:20.825205 4869 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"97a0af7433489a0858c912899c72c07ca77fd506d778954fd6dd112e4f076228"} err="failed to get container status \"97a0af7433489a0858c912899c72c07ca77fd506d778954fd6dd112e4f076228\": rpc error: code = NotFound desc = could not find container \"97a0af7433489a0858c912899c72c07ca77fd506d778954fd6dd112e4f076228\": container with ID starting with 97a0af7433489a0858c912899c72c07ca77fd506d778954fd6dd112e4f076228 not found: ID does not exist" Oct 01 16:35:20 crc kubenswrapper[4869]: I1001 16:35:20.825227 4869 scope.go:117] "RemoveContainer" containerID="48e854ae42ed5b5b19b9f5df29806b8cbdf94b2e1afe36b217cdc8ffa92eaa7e" Oct 01 16:35:20 crc kubenswrapper[4869]: E1001 16:35:20.825625 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"48e854ae42ed5b5b19b9f5df29806b8cbdf94b2e1afe36b217cdc8ffa92eaa7e\": container with ID starting with 48e854ae42ed5b5b19b9f5df29806b8cbdf94b2e1afe36b217cdc8ffa92eaa7e not found: ID does not exist" containerID="48e854ae42ed5b5b19b9f5df29806b8cbdf94b2e1afe36b217cdc8ffa92eaa7e" Oct 01 16:35:20 crc kubenswrapper[4869]: I1001 16:35:20.825673 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"48e854ae42ed5b5b19b9f5df29806b8cbdf94b2e1afe36b217cdc8ffa92eaa7e"} err="failed to get container status \"48e854ae42ed5b5b19b9f5df29806b8cbdf94b2e1afe36b217cdc8ffa92eaa7e\": rpc error: code = NotFound desc = could not find container \"48e854ae42ed5b5b19b9f5df29806b8cbdf94b2e1afe36b217cdc8ffa92eaa7e\": container with ID starting with 48e854ae42ed5b5b19b9f5df29806b8cbdf94b2e1afe36b217cdc8ffa92eaa7e not found: ID does not exist" Oct 01 16:35:20 crc kubenswrapper[4869]: I1001 16:35:20.825705 4869 scope.go:117] "RemoveContainer" containerID="97a0af7433489a0858c912899c72c07ca77fd506d778954fd6dd112e4f076228" Oct 01 16:35:20 crc kubenswrapper[4869]: I1001 16:35:20.826187 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"97a0af7433489a0858c912899c72c07ca77fd506d778954fd6dd112e4f076228"} err="failed to get container status \"97a0af7433489a0858c912899c72c07ca77fd506d778954fd6dd112e4f076228\": rpc error: code = NotFound desc = could not find container \"97a0af7433489a0858c912899c72c07ca77fd506d778954fd6dd112e4f076228\": container with ID starting with 97a0af7433489a0858c912899c72c07ca77fd506d778954fd6dd112e4f076228 not found: ID does not exist" Oct 01 16:35:20 crc kubenswrapper[4869]: I1001 16:35:20.826218 4869 scope.go:117] "RemoveContainer" containerID="48e854ae42ed5b5b19b9f5df29806b8cbdf94b2e1afe36b217cdc8ffa92eaa7e" Oct 01 16:35:20 crc kubenswrapper[4869]: I1001 16:35:20.826522 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"48e854ae42ed5b5b19b9f5df29806b8cbdf94b2e1afe36b217cdc8ffa92eaa7e"} err="failed to get container status \"48e854ae42ed5b5b19b9f5df29806b8cbdf94b2e1afe36b217cdc8ffa92eaa7e\": rpc error: code = NotFound desc = could not find container \"48e854ae42ed5b5b19b9f5df29806b8cbdf94b2e1afe36b217cdc8ffa92eaa7e\": container with ID starting with 48e854ae42ed5b5b19b9f5df29806b8cbdf94b2e1afe36b217cdc8ffa92eaa7e not found: ID does not exist" Oct 01 16:35:20 crc kubenswrapper[4869]: I1001 16:35:20.826543 4869 scope.go:117] "RemoveContainer" containerID="69b77abd8f39d740a581372658860145f35f34a8eacd6dccecd5de6817c73d1b" Oct 01 16:35:20 crc kubenswrapper[4869]: I1001 16:35:20.850435 4869 
scope.go:117] "RemoveContainer" containerID="18fca2b1d4ce900b868746591ae6a63b53c11f8873e7f20c13e90a1ba65b66ff" Oct 01 16:35:20 crc kubenswrapper[4869]: I1001 16:35:20.875822 4869 scope.go:117] "RemoveContainer" containerID="d47376f0b0900e8d989fb702b9a922fb8d94fbadd197b208d10756adf7d9f781" Oct 01 16:35:20 crc kubenswrapper[4869]: I1001 16:35:20.898918 4869 scope.go:117] "RemoveContainer" containerID="b4637c37a19c42553fdba44eab27cdf16f2371202c7d1aa5006cea06065a1d37" Oct 01 16:35:20 crc kubenswrapper[4869]: I1001 16:35:20.930311 4869 scope.go:117] "RemoveContainer" containerID="7183574049d68257b43b0d7565c6d99799de35132e86d9a4f83fcd75701bb77c" Oct 01 16:35:20 crc kubenswrapper[4869]: I1001 16:35:20.968904 4869 scope.go:117] "RemoveContainer" containerID="7ea00b63fc78e883cd02ad429492496e6b43bbca90d812745be1b98ea1507b04" Oct 01 16:35:21 crc kubenswrapper[4869]: I1001 16:35:21.004996 4869 scope.go:117] "RemoveContainer" containerID="0dd43c15e6404f8ea5d0b070b46184b12204e5be20f1f08405f36af12286da07" Oct 01 16:35:21 crc kubenswrapper[4869]: I1001 16:35:21.036646 4869 scope.go:117] "RemoveContainer" containerID="61737b4bf50fd855eb59de0e5a68e27ef4c598b4ddbe64764455fb568926bc52" Oct 01 16:35:21 crc kubenswrapper[4869]: I1001 16:35:21.089566 4869 scope.go:117] "RemoveContainer" containerID="2c66249e632201212d06fd305de1c369bfe7a8eb299a38579b01785ea1fb6157" Oct 01 16:35:21 crc kubenswrapper[4869]: I1001 16:35:21.117993 4869 scope.go:117] "RemoveContainer" containerID="69b77abd8f39d740a581372658860145f35f34a8eacd6dccecd5de6817c73d1b" Oct 01 16:35:21 crc kubenswrapper[4869]: E1001 16:35:21.118911 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"69b77abd8f39d740a581372658860145f35f34a8eacd6dccecd5de6817c73d1b\": container with ID starting with 69b77abd8f39d740a581372658860145f35f34a8eacd6dccecd5de6817c73d1b not found: ID does not exist" containerID="69b77abd8f39d740a581372658860145f35f34a8eacd6dccecd5de6817c73d1b" Oct 01 16:35:21 crc kubenswrapper[4869]: I1001 16:35:21.118967 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"69b77abd8f39d740a581372658860145f35f34a8eacd6dccecd5de6817c73d1b"} err="failed to get container status \"69b77abd8f39d740a581372658860145f35f34a8eacd6dccecd5de6817c73d1b\": rpc error: code = NotFound desc = could not find container \"69b77abd8f39d740a581372658860145f35f34a8eacd6dccecd5de6817c73d1b\": container with ID starting with 69b77abd8f39d740a581372658860145f35f34a8eacd6dccecd5de6817c73d1b not found: ID does not exist" Oct 01 16:35:21 crc kubenswrapper[4869]: I1001 16:35:21.119007 4869 scope.go:117] "RemoveContainer" containerID="18fca2b1d4ce900b868746591ae6a63b53c11f8873e7f20c13e90a1ba65b66ff" Oct 01 16:35:21 crc kubenswrapper[4869]: E1001 16:35:21.119536 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"18fca2b1d4ce900b868746591ae6a63b53c11f8873e7f20c13e90a1ba65b66ff\": container with ID starting with 18fca2b1d4ce900b868746591ae6a63b53c11f8873e7f20c13e90a1ba65b66ff not found: ID does not exist" containerID="18fca2b1d4ce900b868746591ae6a63b53c11f8873e7f20c13e90a1ba65b66ff" Oct 01 16:35:21 crc kubenswrapper[4869]: I1001 16:35:21.119572 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"18fca2b1d4ce900b868746591ae6a63b53c11f8873e7f20c13e90a1ba65b66ff"} err="failed to get container status 
\"18fca2b1d4ce900b868746591ae6a63b53c11f8873e7f20c13e90a1ba65b66ff\": rpc error: code = NotFound desc = could not find container \"18fca2b1d4ce900b868746591ae6a63b53c11f8873e7f20c13e90a1ba65b66ff\": container with ID starting with 18fca2b1d4ce900b868746591ae6a63b53c11f8873e7f20c13e90a1ba65b66ff not found: ID does not exist" Oct 01 16:35:21 crc kubenswrapper[4869]: I1001 16:35:21.119600 4869 scope.go:117] "RemoveContainer" containerID="d47376f0b0900e8d989fb702b9a922fb8d94fbadd197b208d10756adf7d9f781" Oct 01 16:35:21 crc kubenswrapper[4869]: E1001 16:35:21.120012 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d47376f0b0900e8d989fb702b9a922fb8d94fbadd197b208d10756adf7d9f781\": container with ID starting with d47376f0b0900e8d989fb702b9a922fb8d94fbadd197b208d10756adf7d9f781 not found: ID does not exist" containerID="d47376f0b0900e8d989fb702b9a922fb8d94fbadd197b208d10756adf7d9f781" Oct 01 16:35:21 crc kubenswrapper[4869]: I1001 16:35:21.120044 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d47376f0b0900e8d989fb702b9a922fb8d94fbadd197b208d10756adf7d9f781"} err="failed to get container status \"d47376f0b0900e8d989fb702b9a922fb8d94fbadd197b208d10756adf7d9f781\": rpc error: code = NotFound desc = could not find container \"d47376f0b0900e8d989fb702b9a922fb8d94fbadd197b208d10756adf7d9f781\": container with ID starting with d47376f0b0900e8d989fb702b9a922fb8d94fbadd197b208d10756adf7d9f781 not found: ID does not exist" Oct 01 16:35:21 crc kubenswrapper[4869]: I1001 16:35:21.120089 4869 scope.go:117] "RemoveContainer" containerID="b4637c37a19c42553fdba44eab27cdf16f2371202c7d1aa5006cea06065a1d37" Oct 01 16:35:21 crc kubenswrapper[4869]: E1001 16:35:21.120546 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b4637c37a19c42553fdba44eab27cdf16f2371202c7d1aa5006cea06065a1d37\": container with ID starting with b4637c37a19c42553fdba44eab27cdf16f2371202c7d1aa5006cea06065a1d37 not found: ID does not exist" containerID="b4637c37a19c42553fdba44eab27cdf16f2371202c7d1aa5006cea06065a1d37" Oct 01 16:35:21 crc kubenswrapper[4869]: I1001 16:35:21.120581 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b4637c37a19c42553fdba44eab27cdf16f2371202c7d1aa5006cea06065a1d37"} err="failed to get container status \"b4637c37a19c42553fdba44eab27cdf16f2371202c7d1aa5006cea06065a1d37\": rpc error: code = NotFound desc = could not find container \"b4637c37a19c42553fdba44eab27cdf16f2371202c7d1aa5006cea06065a1d37\": container with ID starting with b4637c37a19c42553fdba44eab27cdf16f2371202c7d1aa5006cea06065a1d37 not found: ID does not exist" Oct 01 16:35:21 crc kubenswrapper[4869]: I1001 16:35:21.120604 4869 scope.go:117] "RemoveContainer" containerID="7183574049d68257b43b0d7565c6d99799de35132e86d9a4f83fcd75701bb77c" Oct 01 16:35:21 crc kubenswrapper[4869]: E1001 16:35:21.120891 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7183574049d68257b43b0d7565c6d99799de35132e86d9a4f83fcd75701bb77c\": container with ID starting with 7183574049d68257b43b0d7565c6d99799de35132e86d9a4f83fcd75701bb77c not found: ID does not exist" containerID="7183574049d68257b43b0d7565c6d99799de35132e86d9a4f83fcd75701bb77c" Oct 01 16:35:21 crc kubenswrapper[4869]: I1001 16:35:21.120951 4869 pod_container_deletor.go:53] 
"DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7183574049d68257b43b0d7565c6d99799de35132e86d9a4f83fcd75701bb77c"} err="failed to get container status \"7183574049d68257b43b0d7565c6d99799de35132e86d9a4f83fcd75701bb77c\": rpc error: code = NotFound desc = could not find container \"7183574049d68257b43b0d7565c6d99799de35132e86d9a4f83fcd75701bb77c\": container with ID starting with 7183574049d68257b43b0d7565c6d99799de35132e86d9a4f83fcd75701bb77c not found: ID does not exist" Oct 01 16:35:21 crc kubenswrapper[4869]: I1001 16:35:21.120975 4869 scope.go:117] "RemoveContainer" containerID="7ea00b63fc78e883cd02ad429492496e6b43bbca90d812745be1b98ea1507b04" Oct 01 16:35:21 crc kubenswrapper[4869]: E1001 16:35:21.121400 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7ea00b63fc78e883cd02ad429492496e6b43bbca90d812745be1b98ea1507b04\": container with ID starting with 7ea00b63fc78e883cd02ad429492496e6b43bbca90d812745be1b98ea1507b04 not found: ID does not exist" containerID="7ea00b63fc78e883cd02ad429492496e6b43bbca90d812745be1b98ea1507b04" Oct 01 16:35:21 crc kubenswrapper[4869]: I1001 16:35:21.121429 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7ea00b63fc78e883cd02ad429492496e6b43bbca90d812745be1b98ea1507b04"} err="failed to get container status \"7ea00b63fc78e883cd02ad429492496e6b43bbca90d812745be1b98ea1507b04\": rpc error: code = NotFound desc = could not find container \"7ea00b63fc78e883cd02ad429492496e6b43bbca90d812745be1b98ea1507b04\": container with ID starting with 7ea00b63fc78e883cd02ad429492496e6b43bbca90d812745be1b98ea1507b04 not found: ID does not exist" Oct 01 16:35:21 crc kubenswrapper[4869]: I1001 16:35:21.121447 4869 scope.go:117] "RemoveContainer" containerID="0dd43c15e6404f8ea5d0b070b46184b12204e5be20f1f08405f36af12286da07" Oct 01 16:35:21 crc kubenswrapper[4869]: E1001 16:35:21.121691 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0dd43c15e6404f8ea5d0b070b46184b12204e5be20f1f08405f36af12286da07\": container with ID starting with 0dd43c15e6404f8ea5d0b070b46184b12204e5be20f1f08405f36af12286da07 not found: ID does not exist" containerID="0dd43c15e6404f8ea5d0b070b46184b12204e5be20f1f08405f36af12286da07" Oct 01 16:35:21 crc kubenswrapper[4869]: I1001 16:35:21.121724 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0dd43c15e6404f8ea5d0b070b46184b12204e5be20f1f08405f36af12286da07"} err="failed to get container status \"0dd43c15e6404f8ea5d0b070b46184b12204e5be20f1f08405f36af12286da07\": rpc error: code = NotFound desc = could not find container \"0dd43c15e6404f8ea5d0b070b46184b12204e5be20f1f08405f36af12286da07\": container with ID starting with 0dd43c15e6404f8ea5d0b070b46184b12204e5be20f1f08405f36af12286da07 not found: ID does not exist" Oct 01 16:35:21 crc kubenswrapper[4869]: I1001 16:35:21.121746 4869 scope.go:117] "RemoveContainer" containerID="61737b4bf50fd855eb59de0e5a68e27ef4c598b4ddbe64764455fb568926bc52" Oct 01 16:35:21 crc kubenswrapper[4869]: E1001 16:35:21.121999 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"61737b4bf50fd855eb59de0e5a68e27ef4c598b4ddbe64764455fb568926bc52\": container with ID starting with 61737b4bf50fd855eb59de0e5a68e27ef4c598b4ddbe64764455fb568926bc52 not found: ID does not exist" 
containerID="61737b4bf50fd855eb59de0e5a68e27ef4c598b4ddbe64764455fb568926bc52" Oct 01 16:35:21 crc kubenswrapper[4869]: I1001 16:35:21.122027 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"61737b4bf50fd855eb59de0e5a68e27ef4c598b4ddbe64764455fb568926bc52"} err="failed to get container status \"61737b4bf50fd855eb59de0e5a68e27ef4c598b4ddbe64764455fb568926bc52\": rpc error: code = NotFound desc = could not find container \"61737b4bf50fd855eb59de0e5a68e27ef4c598b4ddbe64764455fb568926bc52\": container with ID starting with 61737b4bf50fd855eb59de0e5a68e27ef4c598b4ddbe64764455fb568926bc52 not found: ID does not exist" Oct 01 16:35:21 crc kubenswrapper[4869]: I1001 16:35:21.122046 4869 scope.go:117] "RemoveContainer" containerID="2c66249e632201212d06fd305de1c369bfe7a8eb299a38579b01785ea1fb6157" Oct 01 16:35:21 crc kubenswrapper[4869]: E1001 16:35:21.122291 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2c66249e632201212d06fd305de1c369bfe7a8eb299a38579b01785ea1fb6157\": container with ID starting with 2c66249e632201212d06fd305de1c369bfe7a8eb299a38579b01785ea1fb6157 not found: ID does not exist" containerID="2c66249e632201212d06fd305de1c369bfe7a8eb299a38579b01785ea1fb6157" Oct 01 16:35:21 crc kubenswrapper[4869]: I1001 16:35:21.122320 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2c66249e632201212d06fd305de1c369bfe7a8eb299a38579b01785ea1fb6157"} err="failed to get container status \"2c66249e632201212d06fd305de1c369bfe7a8eb299a38579b01785ea1fb6157\": rpc error: code = NotFound desc = could not find container \"2c66249e632201212d06fd305de1c369bfe7a8eb299a38579b01785ea1fb6157\": container with ID starting with 2c66249e632201212d06fd305de1c369bfe7a8eb299a38579b01785ea1fb6157 not found: ID does not exist" Oct 01 16:35:21 crc kubenswrapper[4869]: I1001 16:35:21.593321 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="38150466-9f4a-4ae1-a5b5-bfca202b829f" path="/var/lib/kubelet/pods/38150466-9f4a-4ae1-a5b5-bfca202b829f/volumes" Oct 01 16:35:21 crc kubenswrapper[4869]: I1001 16:35:21.597343 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="74ce9afe-754c-4610-90f1-a9c42b2cd395" path="/var/lib/kubelet/pods/74ce9afe-754c-4610-90f1-a9c42b2cd395/volumes" Oct 01 16:35:21 crc kubenswrapper[4869]: I1001 16:35:21.788042 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-68d546b9d8-l2jbk" event={"ID":"18d835b6-473f-4ff7-9a54-bd4f280896c2","Type":"ContainerStarted","Data":"71184a2c4a763400ba6955dd923296a907da31230eb1c065b275063edba4ab24"} Oct 01 16:35:21 crc kubenswrapper[4869]: I1001 16:35:21.788523 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-68d546b9d8-l2jbk" Oct 01 16:35:21 crc kubenswrapper[4869]: I1001 16:35:21.788639 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-68d546b9d8-l2jbk" event={"ID":"18d835b6-473f-4ff7-9a54-bd4f280896c2","Type":"ContainerStarted","Data":"907f8a9b7301b5f62b6427d6d19f4059e3a6e146b87dbb7ea4e846d8a8a5910c"} Oct 01 16:35:21 crc kubenswrapper[4869]: I1001 16:35:21.819624 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-68d546b9d8-l2jbk" podStartSLOduration=2.819600609 podStartE2EDuration="2.819600609s" podCreationTimestamp="2025-10-01 16:35:19 +0000 UTC" 
firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 16:35:21.802223749 +0000 UTC m=+5430.949066905" watchObservedRunningTime="2025-10-01 16:35:21.819600609 +0000 UTC m=+5430.966443735" Oct 01 16:35:27 crc kubenswrapper[4869]: I1001 16:35:27.860507 4869 generic.go:334] "Generic (PLEG): container finished" podID="e748d49d-c9ae-445f-87d7-311d7ef79b37" containerID="257f0793f9d0c63f35197682fe41bba50d94452ac6b103d9ed952b004f9a93cc" exitCode=0 Oct 01 16:35:27 crc kubenswrapper[4869]: I1001 16:35:27.860592 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-8795b" event={"ID":"e748d49d-c9ae-445f-87d7-311d7ef79b37","Type":"ContainerDied","Data":"257f0793f9d0c63f35197682fe41bba50d94452ac6b103d9ed952b004f9a93cc"} Oct 01 16:35:27 crc kubenswrapper[4869]: I1001 16:35:27.864811 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-w6xl4" event={"ID":"517c59e1-4387-4182-8db2-374a1dd516e6","Type":"ContainerStarted","Data":"013f5ef0b06de4dca69a745d6bacdd621811c7a900513800f7abbc9704d8d82f"} Oct 01 16:35:27 crc kubenswrapper[4869]: I1001 16:35:27.864920 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-w6xl4" Oct 01 16:35:27 crc kubenswrapper[4869]: I1001 16:35:27.902934 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-w6xl4" podStartSLOduration=1.726332679 podStartE2EDuration="8.902912915s" podCreationTimestamp="2025-10-01 16:35:19 +0000 UTC" firstStartedPulling="2025-10-01 16:35:20.453320686 +0000 UTC m=+5429.600163802" lastFinishedPulling="2025-10-01 16:35:27.629900922 +0000 UTC m=+5436.776744038" observedRunningTime="2025-10-01 16:35:27.902208447 +0000 UTC m=+5437.049051573" watchObservedRunningTime="2025-10-01 16:35:27.902912915 +0000 UTC m=+5437.049756041" Oct 01 16:35:28 crc kubenswrapper[4869]: I1001 16:35:28.875988 4869 generic.go:334] "Generic (PLEG): container finished" podID="e748d49d-c9ae-445f-87d7-311d7ef79b37" containerID="749ed8d7ac1aba0beeebc9ede11b65bf9c5d03b61ff8392d3848c6f2cf4baab5" exitCode=0 Oct 01 16:35:28 crc kubenswrapper[4869]: I1001 16:35:28.876094 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-8795b" event={"ID":"e748d49d-c9ae-445f-87d7-311d7ef79b37","Type":"ContainerDied","Data":"749ed8d7ac1aba0beeebc9ede11b65bf9c5d03b61ff8392d3848c6f2cf4baab5"} Oct 01 16:35:29 crc kubenswrapper[4869]: I1001 16:35:29.898510 4869 generic.go:334] "Generic (PLEG): container finished" podID="e748d49d-c9ae-445f-87d7-311d7ef79b37" containerID="95a5fdc2fed04d80659514d8e24b3450170c51162ee71107660bdc2b4c8f5949" exitCode=0 Oct 01 16:35:29 crc kubenswrapper[4869]: I1001 16:35:29.898616 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-8795b" event={"ID":"e748d49d-c9ae-445f-87d7-311d7ef79b37","Type":"ContainerDied","Data":"95a5fdc2fed04d80659514d8e24b3450170c51162ee71107660bdc2b4c8f5949"} Oct 01 16:35:30 crc kubenswrapper[4869]: I1001 16:35:30.155310 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-68d546b9d8-l2jbk" Oct 01 16:35:30 crc kubenswrapper[4869]: I1001 16:35:30.235336 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["metallb-system/controller-5d688f5ffc-jtdv4"] Oct 01 16:35:30 crc kubenswrapper[4869]: I1001 16:35:30.235641 4869 
kuberuntime_container.go:808] "Killing container with a grace period" pod="metallb-system/controller-5d688f5ffc-jtdv4" podUID="b15f2099-55a9-4e22-a7da-a1b91fcd63db" containerName="controller" containerID="cri-o://f0e28e051f6a02338e2ccd35331422158adabea194661b5e4e3040620360e253" gracePeriod=2 Oct 01 16:35:30 crc kubenswrapper[4869]: I1001 16:35:30.236138 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="metallb-system/controller-5d688f5ffc-jtdv4" podUID="b15f2099-55a9-4e22-a7da-a1b91fcd63db" containerName="kube-rbac-proxy" containerID="cri-o://916d4e78082a466f9783987d107bb8d9a642ba201d705b86f0fa616f3fc9d79b" gracePeriod=2 Oct 01 16:35:30 crc kubenswrapper[4869]: I1001 16:35:30.245007 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["metallb-system/controller-5d688f5ffc-jtdv4"] Oct 01 16:35:30 crc kubenswrapper[4869]: I1001 16:35:30.476915 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-869db94fcd-5ccjx" Oct 01 16:35:30 crc kubenswrapper[4869]: I1001 16:35:30.557422 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["metallb-system/metallb-operator-webhook-server-d479f666c-w8xkz"] Oct 01 16:35:30 crc kubenswrapper[4869]: I1001 16:35:30.557681 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="metallb-system/metallb-operator-webhook-server-d479f666c-w8xkz" podUID="e66dca24-690f-49b3-96a4-f4376279f654" containerName="webhook-server" containerID="cri-o://039b232b84bada219fa975e014c7548941f0a6943821cff60b9b0b903263e17c" gracePeriod=2 Oct 01 16:35:30 crc kubenswrapper[4869]: I1001 16:35:30.592309 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["metallb-system/metallb-operator-webhook-server-d479f666c-w8xkz"] Oct 01 16:35:30 crc kubenswrapper[4869]: I1001 16:35:30.919579 4869 generic.go:334] "Generic (PLEG): container finished" podID="b15f2099-55a9-4e22-a7da-a1b91fcd63db" containerID="916d4e78082a466f9783987d107bb8d9a642ba201d705b86f0fa616f3fc9d79b" exitCode=0 Oct 01 16:35:30 crc kubenswrapper[4869]: I1001 16:35:30.919806 4869 generic.go:334] "Generic (PLEG): container finished" podID="b15f2099-55a9-4e22-a7da-a1b91fcd63db" containerID="f0e28e051f6a02338e2ccd35331422158adabea194661b5e4e3040620360e253" exitCode=0 Oct 01 16:35:30 crc kubenswrapper[4869]: I1001 16:35:30.922058 4869 generic.go:334] "Generic (PLEG): container finished" podID="e66dca24-690f-49b3-96a4-f4376279f654" containerID="039b232b84bada219fa975e014c7548941f0a6943821cff60b9b0b903263e17c" exitCode=0 Oct 01 16:35:30 crc kubenswrapper[4869]: I1001 16:35:30.940618 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-8795b" event={"ID":"e748d49d-c9ae-445f-87d7-311d7ef79b37","Type":"ContainerStarted","Data":"8508db12ceda4b0212c20f0ca2df5d32797a89654799d070fbbe50a1e8733338"} Oct 01 16:35:30 crc kubenswrapper[4869]: I1001 16:35:30.940648 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-8795b" event={"ID":"e748d49d-c9ae-445f-87d7-311d7ef79b37","Type":"ContainerStarted","Data":"b9c16ce7fe229cfcc5ee4e77bf2fea7bbf1bde52a25130f1c38bd25815a825e9"} Oct 01 16:35:30 crc kubenswrapper[4869]: I1001 16:35:30.940658 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-8795b" event={"ID":"e748d49d-c9ae-445f-87d7-311d7ef79b37","Type":"ContainerStarted","Data":"993774daaac99d42de64de8e2943f6cbbb57de17710c4215b813b41a6f562f7e"} Oct 01 16:35:30 crc kubenswrapper[4869]: I1001 16:35:30.940667 4869 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-8795b" event={"ID":"e748d49d-c9ae-445f-87d7-311d7ef79b37","Type":"ContainerStarted","Data":"c93023746a75a4f9d26af627010c8cfbfcc9839c6a854123b2f5a31419c6f083"} Oct 01 16:35:30 crc kubenswrapper[4869]: I1001 16:35:30.947162 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-5d688f5ffc-jtdv4" Oct 01 16:35:31 crc kubenswrapper[4869]: I1001 16:35:31.002836 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b15f2099-55a9-4e22-a7da-a1b91fcd63db-cert\") pod \"b15f2099-55a9-4e22-a7da-a1b91fcd63db\" (UID: \"b15f2099-55a9-4e22-a7da-a1b91fcd63db\") " Oct 01 16:35:31 crc kubenswrapper[4869]: I1001 16:35:31.003241 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m2t6d\" (UniqueName: \"kubernetes.io/projected/b15f2099-55a9-4e22-a7da-a1b91fcd63db-kube-api-access-m2t6d\") pod \"b15f2099-55a9-4e22-a7da-a1b91fcd63db\" (UID: \"b15f2099-55a9-4e22-a7da-a1b91fcd63db\") " Oct 01 16:35:31 crc kubenswrapper[4869]: I1001 16:35:31.003466 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b15f2099-55a9-4e22-a7da-a1b91fcd63db-metrics-certs\") pod \"b15f2099-55a9-4e22-a7da-a1b91fcd63db\" (UID: \"b15f2099-55a9-4e22-a7da-a1b91fcd63db\") " Oct 01 16:35:31 crc kubenswrapper[4869]: I1001 16:35:31.011665 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b15f2099-55a9-4e22-a7da-a1b91fcd63db-cert" (OuterVolumeSpecName: "cert") pod "b15f2099-55a9-4e22-a7da-a1b91fcd63db" (UID: "b15f2099-55a9-4e22-a7da-a1b91fcd63db"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 16:35:31 crc kubenswrapper[4869]: I1001 16:35:31.013932 4869 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b15f2099-55a9-4e22-a7da-a1b91fcd63db-cert\") on node \"crc\" DevicePath \"\"" Oct 01 16:35:31 crc kubenswrapper[4869]: I1001 16:35:31.015467 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b15f2099-55a9-4e22-a7da-a1b91fcd63db-kube-api-access-m2t6d" (OuterVolumeSpecName: "kube-api-access-m2t6d") pod "b15f2099-55a9-4e22-a7da-a1b91fcd63db" (UID: "b15f2099-55a9-4e22-a7da-a1b91fcd63db"). InnerVolumeSpecName "kube-api-access-m2t6d". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 16:35:31 crc kubenswrapper[4869]: I1001 16:35:31.021229 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b15f2099-55a9-4e22-a7da-a1b91fcd63db-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "b15f2099-55a9-4e22-a7da-a1b91fcd63db" (UID: "b15f2099-55a9-4e22-a7da-a1b91fcd63db"). InnerVolumeSpecName "metrics-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 16:35:31 crc kubenswrapper[4869]: I1001 16:35:31.116739 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m2t6d\" (UniqueName: \"kubernetes.io/projected/b15f2099-55a9-4e22-a7da-a1b91fcd63db-kube-api-access-m2t6d\") on node \"crc\" DevicePath \"\"" Oct 01 16:35:31 crc kubenswrapper[4869]: I1001 16:35:31.116766 4869 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b15f2099-55a9-4e22-a7da-a1b91fcd63db-metrics-certs\") on node \"crc\" DevicePath \"\"" Oct 01 16:35:31 crc kubenswrapper[4869]: I1001 16:35:31.227638 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-d479f666c-w8xkz" Oct 01 16:35:31 crc kubenswrapper[4869]: I1001 16:35:31.319675 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/e66dca24-690f-49b3-96a4-f4376279f654-apiservice-cert\") pod \"e66dca24-690f-49b3-96a4-f4376279f654\" (UID: \"e66dca24-690f-49b3-96a4-f4376279f654\") " Oct 01 16:35:31 crc kubenswrapper[4869]: I1001 16:35:31.319756 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/e66dca24-690f-49b3-96a4-f4376279f654-webhook-cert\") pod \"e66dca24-690f-49b3-96a4-f4376279f654\" (UID: \"e66dca24-690f-49b3-96a4-f4376279f654\") " Oct 01 16:35:31 crc kubenswrapper[4869]: I1001 16:35:31.319879 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qqmc9\" (UniqueName: \"kubernetes.io/projected/e66dca24-690f-49b3-96a4-f4376279f654-kube-api-access-qqmc9\") pod \"e66dca24-690f-49b3-96a4-f4376279f654\" (UID: \"e66dca24-690f-49b3-96a4-f4376279f654\") " Oct 01 16:35:31 crc kubenswrapper[4869]: I1001 16:35:31.324562 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e66dca24-690f-49b3-96a4-f4376279f654-kube-api-access-qqmc9" (OuterVolumeSpecName: "kube-api-access-qqmc9") pod "e66dca24-690f-49b3-96a4-f4376279f654" (UID: "e66dca24-690f-49b3-96a4-f4376279f654"). InnerVolumeSpecName "kube-api-access-qqmc9". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 16:35:31 crc kubenswrapper[4869]: I1001 16:35:31.326813 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e66dca24-690f-49b3-96a4-f4376279f654-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "e66dca24-690f-49b3-96a4-f4376279f654" (UID: "e66dca24-690f-49b3-96a4-f4376279f654"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 16:35:31 crc kubenswrapper[4869]: I1001 16:35:31.327177 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e66dca24-690f-49b3-96a4-f4376279f654-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "e66dca24-690f-49b3-96a4-f4376279f654" (UID: "e66dca24-690f-49b3-96a4-f4376279f654"). InnerVolumeSpecName "webhook-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 16:35:31 crc kubenswrapper[4869]: I1001 16:35:31.422148 4869 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/e66dca24-690f-49b3-96a4-f4376279f654-apiservice-cert\") on node \"crc\" DevicePath \"\"" Oct 01 16:35:31 crc kubenswrapper[4869]: I1001 16:35:31.422211 4869 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/e66dca24-690f-49b3-96a4-f4376279f654-webhook-cert\") on node \"crc\" DevicePath \"\"" Oct 01 16:35:31 crc kubenswrapper[4869]: I1001 16:35:31.422225 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qqmc9\" (UniqueName: \"kubernetes.io/projected/e66dca24-690f-49b3-96a4-f4376279f654-kube-api-access-qqmc9\") on node \"crc\" DevicePath \"\"" Oct 01 16:35:31 crc kubenswrapper[4869]: I1001 16:35:31.592450 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b15f2099-55a9-4e22-a7da-a1b91fcd63db" path="/var/lib/kubelet/pods/b15f2099-55a9-4e22-a7da-a1b91fcd63db/volumes" Oct 01 16:35:31 crc kubenswrapper[4869]: I1001 16:35:31.593755 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e66dca24-690f-49b3-96a4-f4376279f654" path="/var/lib/kubelet/pods/e66dca24-690f-49b3-96a4-f4376279f654/volumes" Oct 01 16:35:31 crc kubenswrapper[4869]: I1001 16:35:31.956220 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-8795b" event={"ID":"e748d49d-c9ae-445f-87d7-311d7ef79b37","Type":"ContainerStarted","Data":"78786c92e153b6e4a220c8ebc959e3be4221a6d3d7c8837611db7b336a0266eb"} Oct 01 16:35:31 crc kubenswrapper[4869]: I1001 16:35:31.956265 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-8795b" event={"ID":"e748d49d-c9ae-445f-87d7-311d7ef79b37","Type":"ContainerStarted","Data":"f6108f79a792c35b287905a698a1e26336ef1488dda8aa27d48235f8a88b0d21"} Oct 01 16:35:31 crc kubenswrapper[4869]: I1001 16:35:31.956430 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-8795b" Oct 01 16:35:31 crc kubenswrapper[4869]: I1001 16:35:31.958382 4869 scope.go:117] "RemoveContainer" containerID="916d4e78082a466f9783987d107bb8d9a642ba201d705b86f0fa616f3fc9d79b" Oct 01 16:35:31 crc kubenswrapper[4869]: I1001 16:35:31.958429 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-5d688f5ffc-jtdv4" Oct 01 16:35:31 crc kubenswrapper[4869]: I1001 16:35:31.960829 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-d479f666c-w8xkz" Oct 01 16:35:31 crc kubenswrapper[4869]: I1001 16:35:31.986540 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-8795b" podStartSLOduration=5.388268646 podStartE2EDuration="12.986517559s" podCreationTimestamp="2025-10-01 16:35:19 +0000 UTC" firstStartedPulling="2025-10-01 16:35:20.00286079 +0000 UTC m=+5429.149703906" lastFinishedPulling="2025-10-01 16:35:27.601109693 +0000 UTC m=+5436.747952819" observedRunningTime="2025-10-01 16:35:31.982849957 +0000 UTC m=+5441.129693093" watchObservedRunningTime="2025-10-01 16:35:31.986517559 +0000 UTC m=+5441.133360675" Oct 01 16:35:32 crc kubenswrapper[4869]: I1001 16:35:32.033668 4869 scope.go:117] "RemoveContainer" containerID="f0e28e051f6a02338e2ccd35331422158adabea194661b5e4e3040620360e253" Oct 01 16:35:32 crc kubenswrapper[4869]: I1001 16:35:32.071247 4869 scope.go:117] "RemoveContainer" containerID="039b232b84bada219fa975e014c7548941f0a6943821cff60b9b0b903263e17c" Oct 01 16:35:34 crc kubenswrapper[4869]: I1001 16:35:34.773237 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-8795b" Oct 01 16:35:34 crc kubenswrapper[4869]: I1001 16:35:34.812827 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-8795b" Oct 01 16:35:39 crc kubenswrapper[4869]: I1001 16:35:39.803916 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-6gmkw" Oct 01 16:35:39 crc kubenswrapper[4869]: I1001 16:35:39.939672 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-w6xl4" Oct 01 16:35:40 crc kubenswrapper[4869]: I1001 16:35:39.999779 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["metallb-system/frr-k8s-webhook-server-5478bdb765-t9fnk"] Oct 01 16:35:40 crc kubenswrapper[4869]: I1001 16:35:40.000024 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-t9fnk" podUID="5ff28534-d014-499c-82cb-ffe31d55e7a1" containerName="frr-k8s-webhook-server" containerID="cri-o://1095f8dab0f9106a8f9d7073f841cbfcef247a736cc00e4657f56be21feddbd7" gracePeriod=10 Oct 01 16:35:40 crc kubenswrapper[4869]: I1001 16:35:40.141252 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-ddc944bf4-hrp74" Oct 01 16:35:40 crc kubenswrapper[4869]: I1001 16:35:40.223520 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["metallb-system/metallb-operator-controller-manager-fc84fcf8c-4ttdv"] Oct 01 16:35:40 crc kubenswrapper[4869]: I1001 16:35:40.224627 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="metallb-system/metallb-operator-controller-manager-fc84fcf8c-4ttdv" podUID="1907f504-e5aa-4cdf-868f-1dbafcb47d83" containerName="manager" containerID="cri-o://74a2fc84c27e883887ebf93e386bf124f1e746f539f38510342185271b0acca1" gracePeriod=10 Oct 01 16:35:40 crc kubenswrapper[4869]: E1001 16:35:40.571119 4869 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1907f504_e5aa_4cdf_868f_1dbafcb47d83.slice/crio-conmon-74a2fc84c27e883887ebf93e386bf124f1e746f539f38510342185271b0acca1.scope\": RecentStats: unable to find data in memory cache], 
[\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1907f504_e5aa_4cdf_868f_1dbafcb47d83.slice/crio-74a2fc84c27e883887ebf93e386bf124f1e746f539f38510342185271b0acca1.scope\": RecentStats: unable to find data in memory cache]" Oct 01 16:35:40 crc kubenswrapper[4869]: I1001 16:35:40.942683 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-t9fnk" Oct 01 16:35:40 crc kubenswrapper[4869]: I1001 16:35:40.948685 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-fc84fcf8c-4ttdv" Oct 01 16:35:41 crc kubenswrapper[4869]: I1001 16:35:41.078792 4869 generic.go:334] "Generic (PLEG): container finished" podID="1907f504-e5aa-4cdf-868f-1dbafcb47d83" containerID="74a2fc84c27e883887ebf93e386bf124f1e746f539f38510342185271b0acca1" exitCode=0 Oct 01 16:35:41 crc kubenswrapper[4869]: I1001 16:35:41.078863 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-fc84fcf8c-4ttdv" event={"ID":"1907f504-e5aa-4cdf-868f-1dbafcb47d83","Type":"ContainerDied","Data":"74a2fc84c27e883887ebf93e386bf124f1e746f539f38510342185271b0acca1"} Oct 01 16:35:41 crc kubenswrapper[4869]: I1001 16:35:41.078870 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-fc84fcf8c-4ttdv" Oct 01 16:35:41 crc kubenswrapper[4869]: I1001 16:35:41.078890 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-fc84fcf8c-4ttdv" event={"ID":"1907f504-e5aa-4cdf-868f-1dbafcb47d83","Type":"ContainerDied","Data":"19e8b25af2481a48186cd9a49d9a038303479aae8af5242aecc99c52d92d244e"} Oct 01 16:35:41 crc kubenswrapper[4869]: I1001 16:35:41.078908 4869 scope.go:117] "RemoveContainer" containerID="74a2fc84c27e883887ebf93e386bf124f1e746f539f38510342185271b0acca1" Oct 01 16:35:41 crc kubenswrapper[4869]: I1001 16:35:41.078958 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-spzw9\" (UniqueName: \"kubernetes.io/projected/5ff28534-d014-499c-82cb-ffe31d55e7a1-kube-api-access-spzw9\") pod \"5ff28534-d014-499c-82cb-ffe31d55e7a1\" (UID: \"5ff28534-d014-499c-82cb-ffe31d55e7a1\") " Oct 01 16:35:41 crc kubenswrapper[4869]: I1001 16:35:41.079016 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5jz5l\" (UniqueName: \"kubernetes.io/projected/1907f504-e5aa-4cdf-868f-1dbafcb47d83-kube-api-access-5jz5l\") pod \"1907f504-e5aa-4cdf-868f-1dbafcb47d83\" (UID: \"1907f504-e5aa-4cdf-868f-1dbafcb47d83\") " Oct 01 16:35:41 crc kubenswrapper[4869]: I1001 16:35:41.079084 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/1907f504-e5aa-4cdf-868f-1dbafcb47d83-apiservice-cert\") pod \"1907f504-e5aa-4cdf-868f-1dbafcb47d83\" (UID: \"1907f504-e5aa-4cdf-868f-1dbafcb47d83\") " Oct 01 16:35:41 crc kubenswrapper[4869]: I1001 16:35:41.079124 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/1907f504-e5aa-4cdf-868f-1dbafcb47d83-webhook-cert\") pod \"1907f504-e5aa-4cdf-868f-1dbafcb47d83\" (UID: \"1907f504-e5aa-4cdf-868f-1dbafcb47d83\") " Oct 01 16:35:41 crc kubenswrapper[4869]: I1001 16:35:41.079155 4869 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/5ff28534-d014-499c-82cb-ffe31d55e7a1-cert\") pod \"5ff28534-d014-499c-82cb-ffe31d55e7a1\" (UID: \"5ff28534-d014-499c-82cb-ffe31d55e7a1\") " Oct 01 16:35:41 crc kubenswrapper[4869]: I1001 16:35:41.083083 4869 generic.go:334] "Generic (PLEG): container finished" podID="5ff28534-d014-499c-82cb-ffe31d55e7a1" containerID="1095f8dab0f9106a8f9d7073f841cbfcef247a736cc00e4657f56be21feddbd7" exitCode=0 Oct 01 16:35:41 crc kubenswrapper[4869]: I1001 16:35:41.083124 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-t9fnk" event={"ID":"5ff28534-d014-499c-82cb-ffe31d55e7a1","Type":"ContainerDied","Data":"1095f8dab0f9106a8f9d7073f841cbfcef247a736cc00e4657f56be21feddbd7"} Oct 01 16:35:41 crc kubenswrapper[4869]: I1001 16:35:41.083151 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-t9fnk" event={"ID":"5ff28534-d014-499c-82cb-ffe31d55e7a1","Type":"ContainerDied","Data":"33db778453772d84b486f5d86fe6d7df6f48ca41bdc2b9d1f8da310740965867"} Oct 01 16:35:41 crc kubenswrapper[4869]: I1001 16:35:41.083207 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-t9fnk" Oct 01 16:35:41 crc kubenswrapper[4869]: I1001 16:35:41.084477 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1907f504-e5aa-4cdf-868f-1dbafcb47d83-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "1907f504-e5aa-4cdf-868f-1dbafcb47d83" (UID: "1907f504-e5aa-4cdf-868f-1dbafcb47d83"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 16:35:41 crc kubenswrapper[4869]: I1001 16:35:41.084741 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1907f504-e5aa-4cdf-868f-1dbafcb47d83-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "1907f504-e5aa-4cdf-868f-1dbafcb47d83" (UID: "1907f504-e5aa-4cdf-868f-1dbafcb47d83"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 16:35:41 crc kubenswrapper[4869]: I1001 16:35:41.084918 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5ff28534-d014-499c-82cb-ffe31d55e7a1-kube-api-access-spzw9" (OuterVolumeSpecName: "kube-api-access-spzw9") pod "5ff28534-d014-499c-82cb-ffe31d55e7a1" (UID: "5ff28534-d014-499c-82cb-ffe31d55e7a1"). InnerVolumeSpecName "kube-api-access-spzw9". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 16:35:41 crc kubenswrapper[4869]: I1001 16:35:41.087474 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5ff28534-d014-499c-82cb-ffe31d55e7a1-cert" (OuterVolumeSpecName: "cert") pod "5ff28534-d014-499c-82cb-ffe31d55e7a1" (UID: "5ff28534-d014-499c-82cb-ffe31d55e7a1"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 16:35:41 crc kubenswrapper[4869]: I1001 16:35:41.087478 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1907f504-e5aa-4cdf-868f-1dbafcb47d83-kube-api-access-5jz5l" (OuterVolumeSpecName: "kube-api-access-5jz5l") pod "1907f504-e5aa-4cdf-868f-1dbafcb47d83" (UID: "1907f504-e5aa-4cdf-868f-1dbafcb47d83"). InnerVolumeSpecName "kube-api-access-5jz5l". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 16:35:41 crc kubenswrapper[4869]: I1001 16:35:41.181199 4869 scope.go:117] "RemoveContainer" containerID="74a2fc84c27e883887ebf93e386bf124f1e746f539f38510342185271b0acca1" Oct 01 16:35:41 crc kubenswrapper[4869]: I1001 16:35:41.181240 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-spzw9\" (UniqueName: \"kubernetes.io/projected/5ff28534-d014-499c-82cb-ffe31d55e7a1-kube-api-access-spzw9\") on node \"crc\" DevicePath \"\"" Oct 01 16:35:41 crc kubenswrapper[4869]: I1001 16:35:41.181294 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5jz5l\" (UniqueName: \"kubernetes.io/projected/1907f504-e5aa-4cdf-868f-1dbafcb47d83-kube-api-access-5jz5l\") on node \"crc\" DevicePath \"\"" Oct 01 16:35:41 crc kubenswrapper[4869]: I1001 16:35:41.181308 4869 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/1907f504-e5aa-4cdf-868f-1dbafcb47d83-apiservice-cert\") on node \"crc\" DevicePath \"\"" Oct 01 16:35:41 crc kubenswrapper[4869]: I1001 16:35:41.181319 4869 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/1907f504-e5aa-4cdf-868f-1dbafcb47d83-webhook-cert\") on node \"crc\" DevicePath \"\"" Oct 01 16:35:41 crc kubenswrapper[4869]: I1001 16:35:41.181329 4869 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/5ff28534-d014-499c-82cb-ffe31d55e7a1-cert\") on node \"crc\" DevicePath \"\"" Oct 01 16:35:41 crc kubenswrapper[4869]: E1001 16:35:41.181819 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"74a2fc84c27e883887ebf93e386bf124f1e746f539f38510342185271b0acca1\": container with ID starting with 74a2fc84c27e883887ebf93e386bf124f1e746f539f38510342185271b0acca1 not found: ID does not exist" containerID="74a2fc84c27e883887ebf93e386bf124f1e746f539f38510342185271b0acca1" Oct 01 16:35:41 crc kubenswrapper[4869]: I1001 16:35:41.181851 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"74a2fc84c27e883887ebf93e386bf124f1e746f539f38510342185271b0acca1"} err="failed to get container status \"74a2fc84c27e883887ebf93e386bf124f1e746f539f38510342185271b0acca1\": rpc error: code = NotFound desc = could not find container \"74a2fc84c27e883887ebf93e386bf124f1e746f539f38510342185271b0acca1\": container with ID starting with 74a2fc84c27e883887ebf93e386bf124f1e746f539f38510342185271b0acca1 not found: ID does not exist" Oct 01 16:35:41 crc kubenswrapper[4869]: I1001 16:35:41.181871 4869 scope.go:117] "RemoveContainer" containerID="1095f8dab0f9106a8f9d7073f841cbfcef247a736cc00e4657f56be21feddbd7" Oct 01 16:35:41 crc kubenswrapper[4869]: I1001 16:35:41.201875 4869 scope.go:117] "RemoveContainer" containerID="1095f8dab0f9106a8f9d7073f841cbfcef247a736cc00e4657f56be21feddbd7" Oct 01 16:35:41 crc kubenswrapper[4869]: E1001 16:35:41.202345 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1095f8dab0f9106a8f9d7073f841cbfcef247a736cc00e4657f56be21feddbd7\": container with ID starting with 1095f8dab0f9106a8f9d7073f841cbfcef247a736cc00e4657f56be21feddbd7 not found: ID does not exist" containerID="1095f8dab0f9106a8f9d7073f841cbfcef247a736cc00e4657f56be21feddbd7" Oct 01 16:35:41 crc kubenswrapper[4869]: I1001 16:35:41.202376 4869 pod_container_deletor.go:53] 
"DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1095f8dab0f9106a8f9d7073f841cbfcef247a736cc00e4657f56be21feddbd7"} err="failed to get container status \"1095f8dab0f9106a8f9d7073f841cbfcef247a736cc00e4657f56be21feddbd7\": rpc error: code = NotFound desc = could not find container \"1095f8dab0f9106a8f9d7073f841cbfcef247a736cc00e4657f56be21feddbd7\": container with ID starting with 1095f8dab0f9106a8f9d7073f841cbfcef247a736cc00e4657f56be21feddbd7 not found: ID does not exist" Oct 01 16:35:41 crc kubenswrapper[4869]: I1001 16:35:41.413157 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["metallb-system/metallb-operator-controller-manager-fc84fcf8c-4ttdv"] Oct 01 16:35:41 crc kubenswrapper[4869]: I1001 16:35:41.430151 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["metallb-system/metallb-operator-controller-manager-fc84fcf8c-4ttdv"] Oct 01 16:35:41 crc kubenswrapper[4869]: I1001 16:35:41.441810 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["metallb-system/frr-k8s-webhook-server-5478bdb765-t9fnk"] Oct 01 16:35:41 crc kubenswrapper[4869]: I1001 16:35:41.452678 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["metallb-system/frr-k8s-webhook-server-5478bdb765-t9fnk"] Oct 01 16:35:41 crc kubenswrapper[4869]: I1001 16:35:41.593157 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1907f504-e5aa-4cdf-868f-1dbafcb47d83" path="/var/lib/kubelet/pods/1907f504-e5aa-4cdf-868f-1dbafcb47d83/volumes" Oct 01 16:35:41 crc kubenswrapper[4869]: I1001 16:35:41.593749 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5ff28534-d014-499c-82cb-ffe31d55e7a1" path="/var/lib/kubelet/pods/5ff28534-d014-499c-82cb-ffe31d55e7a1/volumes" Oct 01 16:35:43 crc kubenswrapper[4869]: I1001 16:35:43.353982 4869 patch_prober.go:28] interesting pod/machine-config-daemon-c86m8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 16:35:43 crc kubenswrapper[4869]: I1001 16:35:43.354408 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 16:35:43 crc kubenswrapper[4869]: I1001 16:35:43.354462 4869 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" Oct 01 16:35:43 crc kubenswrapper[4869]: I1001 16:35:43.355163 4869 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"c121385614507939bd4b0233bfac1b4b5096c8ffc9330ccf589f609e0b1c878a"} pod="openshift-machine-config-operator/machine-config-daemon-c86m8" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 01 16:35:43 crc kubenswrapper[4869]: I1001 16:35:43.355216 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" containerID="cri-o://c121385614507939bd4b0233bfac1b4b5096c8ffc9330ccf589f609e0b1c878a" gracePeriod=600 Oct 01 16:35:44 crc 
kubenswrapper[4869]: I1001 16:35:44.121644 4869 generic.go:334] "Generic (PLEG): container finished" podID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerID="c121385614507939bd4b0233bfac1b4b5096c8ffc9330ccf589f609e0b1c878a" exitCode=0 Oct 01 16:35:44 crc kubenswrapper[4869]: I1001 16:35:44.121706 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" event={"ID":"a4b64f7f-0b03-4f47-965b-9fde048b735c","Type":"ContainerDied","Data":"c121385614507939bd4b0233bfac1b4b5096c8ffc9330ccf589f609e0b1c878a"} Oct 01 16:35:44 crc kubenswrapper[4869]: I1001 16:35:44.121996 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" event={"ID":"a4b64f7f-0b03-4f47-965b-9fde048b735c","Type":"ContainerStarted","Data":"7dd9c90d275bfe93ccea8e2e9638d51d1466814860669199622dcb3c3bbdd327"} Oct 01 16:35:44 crc kubenswrapper[4869]: I1001 16:35:44.122021 4869 scope.go:117] "RemoveContainer" containerID="0eaa53cd5d569b0e6bef9edefc7d1fa07489655924f7f785aa79773af28d727f" Oct 01 16:35:49 crc kubenswrapper[4869]: I1001 16:35:49.776839 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-8795b" Oct 01 16:37:24 crc kubenswrapper[4869]: I1001 16:37:24.309995 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-5nsjv"] Oct 01 16:37:24 crc kubenswrapper[4869]: E1001 16:37:24.310868 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b15f2099-55a9-4e22-a7da-a1b91fcd63db" containerName="controller" Oct 01 16:37:24 crc kubenswrapper[4869]: I1001 16:37:24.310883 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="b15f2099-55a9-4e22-a7da-a1b91fcd63db" containerName="controller" Oct 01 16:37:24 crc kubenswrapper[4869]: E1001 16:37:24.310905 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e66dca24-690f-49b3-96a4-f4376279f654" containerName="webhook-server" Oct 01 16:37:24 crc kubenswrapper[4869]: I1001 16:37:24.310913 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="e66dca24-690f-49b3-96a4-f4376279f654" containerName="webhook-server" Oct 01 16:37:24 crc kubenswrapper[4869]: E1001 16:37:24.310933 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b15f2099-55a9-4e22-a7da-a1b91fcd63db" containerName="kube-rbac-proxy" Oct 01 16:37:24 crc kubenswrapper[4869]: I1001 16:37:24.310941 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="b15f2099-55a9-4e22-a7da-a1b91fcd63db" containerName="kube-rbac-proxy" Oct 01 16:37:24 crc kubenswrapper[4869]: E1001 16:37:24.310973 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ff28534-d014-499c-82cb-ffe31d55e7a1" containerName="frr-k8s-webhook-server" Oct 01 16:37:24 crc kubenswrapper[4869]: I1001 16:37:24.310982 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ff28534-d014-499c-82cb-ffe31d55e7a1" containerName="frr-k8s-webhook-server" Oct 01 16:37:24 crc kubenswrapper[4869]: E1001 16:37:24.311013 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1907f504-e5aa-4cdf-868f-1dbafcb47d83" containerName="manager" Oct 01 16:37:24 crc kubenswrapper[4869]: I1001 16:37:24.311020 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="1907f504-e5aa-4cdf-868f-1dbafcb47d83" containerName="manager" Oct 01 16:37:24 crc kubenswrapper[4869]: I1001 16:37:24.313625 4869 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="5ff28534-d014-499c-82cb-ffe31d55e7a1" containerName="frr-k8s-webhook-server" Oct 01 16:37:24 crc kubenswrapper[4869]: I1001 16:37:24.313655 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="b15f2099-55a9-4e22-a7da-a1b91fcd63db" containerName="kube-rbac-proxy" Oct 01 16:37:24 crc kubenswrapper[4869]: I1001 16:37:24.313671 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="1907f504-e5aa-4cdf-868f-1dbafcb47d83" containerName="manager" Oct 01 16:37:24 crc kubenswrapper[4869]: I1001 16:37:24.313692 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="b15f2099-55a9-4e22-a7da-a1b91fcd63db" containerName="controller" Oct 01 16:37:24 crc kubenswrapper[4869]: I1001 16:37:24.313707 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="e66dca24-690f-49b3-96a4-f4376279f654" containerName="webhook-server" Oct 01 16:37:24 crc kubenswrapper[4869]: I1001 16:37:24.315788 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5nsjv" Oct 01 16:37:24 crc kubenswrapper[4869]: I1001 16:37:24.331239 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-5nsjv"] Oct 01 16:37:24 crc kubenswrapper[4869]: I1001 16:37:24.399444 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/30babdf3-c623-4d15-9d97-d7496b2c58e6-catalog-content\") pod \"certified-operators-5nsjv\" (UID: \"30babdf3-c623-4d15-9d97-d7496b2c58e6\") " pod="openshift-marketplace/certified-operators-5nsjv" Oct 01 16:37:24 crc kubenswrapper[4869]: I1001 16:37:24.399729 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/30babdf3-c623-4d15-9d97-d7496b2c58e6-utilities\") pod \"certified-operators-5nsjv\" (UID: \"30babdf3-c623-4d15-9d97-d7496b2c58e6\") " pod="openshift-marketplace/certified-operators-5nsjv" Oct 01 16:37:24 crc kubenswrapper[4869]: I1001 16:37:24.399902 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ms4jb\" (UniqueName: \"kubernetes.io/projected/30babdf3-c623-4d15-9d97-d7496b2c58e6-kube-api-access-ms4jb\") pod \"certified-operators-5nsjv\" (UID: \"30babdf3-c623-4d15-9d97-d7496b2c58e6\") " pod="openshift-marketplace/certified-operators-5nsjv" Oct 01 16:37:24 crc kubenswrapper[4869]: I1001 16:37:24.501372 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ms4jb\" (UniqueName: \"kubernetes.io/projected/30babdf3-c623-4d15-9d97-d7496b2c58e6-kube-api-access-ms4jb\") pod \"certified-operators-5nsjv\" (UID: \"30babdf3-c623-4d15-9d97-d7496b2c58e6\") " pod="openshift-marketplace/certified-operators-5nsjv" Oct 01 16:37:24 crc kubenswrapper[4869]: I1001 16:37:24.501489 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/30babdf3-c623-4d15-9d97-d7496b2c58e6-catalog-content\") pod \"certified-operators-5nsjv\" (UID: \"30babdf3-c623-4d15-9d97-d7496b2c58e6\") " pod="openshift-marketplace/certified-operators-5nsjv" Oct 01 16:37:24 crc kubenswrapper[4869]: I1001 16:37:24.501569 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/30babdf3-c623-4d15-9d97-d7496b2c58e6-utilities\") pod 
\"certified-operators-5nsjv\" (UID: \"30babdf3-c623-4d15-9d97-d7496b2c58e6\") " pod="openshift-marketplace/certified-operators-5nsjv" Oct 01 16:37:24 crc kubenswrapper[4869]: I1001 16:37:24.501980 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/30babdf3-c623-4d15-9d97-d7496b2c58e6-utilities\") pod \"certified-operators-5nsjv\" (UID: \"30babdf3-c623-4d15-9d97-d7496b2c58e6\") " pod="openshift-marketplace/certified-operators-5nsjv" Oct 01 16:37:24 crc kubenswrapper[4869]: I1001 16:37:24.502328 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/30babdf3-c623-4d15-9d97-d7496b2c58e6-catalog-content\") pod \"certified-operators-5nsjv\" (UID: \"30babdf3-c623-4d15-9d97-d7496b2c58e6\") " pod="openshift-marketplace/certified-operators-5nsjv" Oct 01 16:37:24 crc kubenswrapper[4869]: I1001 16:37:24.524054 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ms4jb\" (UniqueName: \"kubernetes.io/projected/30babdf3-c623-4d15-9d97-d7496b2c58e6-kube-api-access-ms4jb\") pod \"certified-operators-5nsjv\" (UID: \"30babdf3-c623-4d15-9d97-d7496b2c58e6\") " pod="openshift-marketplace/certified-operators-5nsjv" Oct 01 16:37:24 crc kubenswrapper[4869]: I1001 16:37:24.645980 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5nsjv" Oct 01 16:37:25 crc kubenswrapper[4869]: I1001 16:37:25.189313 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-5nsjv"] Oct 01 16:37:26 crc kubenswrapper[4869]: I1001 16:37:26.108837 4869 generic.go:334] "Generic (PLEG): container finished" podID="30babdf3-c623-4d15-9d97-d7496b2c58e6" containerID="5a8e4cc0e1b2420fc4e77f8512178447f4cec2c2a97eab415bfb557013346d39" exitCode=0 Oct 01 16:37:26 crc kubenswrapper[4869]: I1001 16:37:26.108951 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5nsjv" event={"ID":"30babdf3-c623-4d15-9d97-d7496b2c58e6","Type":"ContainerDied","Data":"5a8e4cc0e1b2420fc4e77f8512178447f4cec2c2a97eab415bfb557013346d39"} Oct 01 16:37:26 crc kubenswrapper[4869]: I1001 16:37:26.109137 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5nsjv" event={"ID":"30babdf3-c623-4d15-9d97-d7496b2c58e6","Type":"ContainerStarted","Data":"2e8bc9ed4c350ac7ea2fa53597d953c9cfe664005c43ee0add6462678c0b33aa"} Oct 01 16:37:27 crc kubenswrapper[4869]: I1001 16:37:27.118769 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5nsjv" event={"ID":"30babdf3-c623-4d15-9d97-d7496b2c58e6","Type":"ContainerStarted","Data":"053774cf162f006563b21520e7b720de49c9ebf2b1e11b113c354e82e5362dff"} Oct 01 16:37:28 crc kubenswrapper[4869]: I1001 16:37:28.134492 4869 generic.go:334] "Generic (PLEG): container finished" podID="30babdf3-c623-4d15-9d97-d7496b2c58e6" containerID="053774cf162f006563b21520e7b720de49c9ebf2b1e11b113c354e82e5362dff" exitCode=0 Oct 01 16:37:28 crc kubenswrapper[4869]: I1001 16:37:28.134554 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5nsjv" event={"ID":"30babdf3-c623-4d15-9d97-d7496b2c58e6","Type":"ContainerDied","Data":"053774cf162f006563b21520e7b720de49c9ebf2b1e11b113c354e82e5362dff"} Oct 01 16:37:29 crc kubenswrapper[4869]: I1001 16:37:29.144695 4869 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5nsjv" event={"ID":"30babdf3-c623-4d15-9d97-d7496b2c58e6","Type":"ContainerStarted","Data":"4ba1b7387b5fc9b881254bd71979a8faf64068ce9aa79ddba37bfe4d27ad002e"} Oct 01 16:37:29 crc kubenswrapper[4869]: I1001 16:37:29.168584 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-5nsjv" podStartSLOduration=2.55398631 podStartE2EDuration="5.168564079s" podCreationTimestamp="2025-10-01 16:37:24 +0000 UTC" firstStartedPulling="2025-10-01 16:37:26.110723616 +0000 UTC m=+5555.257566732" lastFinishedPulling="2025-10-01 16:37:28.725301365 +0000 UTC m=+5557.872144501" observedRunningTime="2025-10-01 16:37:29.164965658 +0000 UTC m=+5558.311808774" watchObservedRunningTime="2025-10-01 16:37:29.168564079 +0000 UTC m=+5558.315407195" Oct 01 16:37:30 crc kubenswrapper[4869]: I1001 16:37:30.698840 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-wn2wb"] Oct 01 16:37:30 crc kubenswrapper[4869]: I1001 16:37:30.701024 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wn2wb" Oct 01 16:37:30 crc kubenswrapper[4869]: I1001 16:37:30.721143 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-wn2wb"] Oct 01 16:37:30 crc kubenswrapper[4869]: I1001 16:37:30.851159 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b155ee91-d703-424e-9eb3-c9d7615d9bed-utilities\") pod \"redhat-marketplace-wn2wb\" (UID: \"b155ee91-d703-424e-9eb3-c9d7615d9bed\") " pod="openshift-marketplace/redhat-marketplace-wn2wb" Oct 01 16:37:30 crc kubenswrapper[4869]: I1001 16:37:30.851230 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tqtrn\" (UniqueName: \"kubernetes.io/projected/b155ee91-d703-424e-9eb3-c9d7615d9bed-kube-api-access-tqtrn\") pod \"redhat-marketplace-wn2wb\" (UID: \"b155ee91-d703-424e-9eb3-c9d7615d9bed\") " pod="openshift-marketplace/redhat-marketplace-wn2wb" Oct 01 16:37:30 crc kubenswrapper[4869]: I1001 16:37:30.851316 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b155ee91-d703-424e-9eb3-c9d7615d9bed-catalog-content\") pod \"redhat-marketplace-wn2wb\" (UID: \"b155ee91-d703-424e-9eb3-c9d7615d9bed\") " pod="openshift-marketplace/redhat-marketplace-wn2wb" Oct 01 16:37:30 crc kubenswrapper[4869]: I1001 16:37:30.953489 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b155ee91-d703-424e-9eb3-c9d7615d9bed-utilities\") pod \"redhat-marketplace-wn2wb\" (UID: \"b155ee91-d703-424e-9eb3-c9d7615d9bed\") " pod="openshift-marketplace/redhat-marketplace-wn2wb" Oct 01 16:37:30 crc kubenswrapper[4869]: I1001 16:37:30.953548 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tqtrn\" (UniqueName: \"kubernetes.io/projected/b155ee91-d703-424e-9eb3-c9d7615d9bed-kube-api-access-tqtrn\") pod \"redhat-marketplace-wn2wb\" (UID: \"b155ee91-d703-424e-9eb3-c9d7615d9bed\") " pod="openshift-marketplace/redhat-marketplace-wn2wb" Oct 01 16:37:30 crc kubenswrapper[4869]: I1001 16:37:30.953580 4869 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b155ee91-d703-424e-9eb3-c9d7615d9bed-catalog-content\") pod \"redhat-marketplace-wn2wb\" (UID: \"b155ee91-d703-424e-9eb3-c9d7615d9bed\") " pod="openshift-marketplace/redhat-marketplace-wn2wb" Oct 01 16:37:30 crc kubenswrapper[4869]: I1001 16:37:30.953943 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b155ee91-d703-424e-9eb3-c9d7615d9bed-utilities\") pod \"redhat-marketplace-wn2wb\" (UID: \"b155ee91-d703-424e-9eb3-c9d7615d9bed\") " pod="openshift-marketplace/redhat-marketplace-wn2wb" Oct 01 16:37:30 crc kubenswrapper[4869]: I1001 16:37:30.953992 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b155ee91-d703-424e-9eb3-c9d7615d9bed-catalog-content\") pod \"redhat-marketplace-wn2wb\" (UID: \"b155ee91-d703-424e-9eb3-c9d7615d9bed\") " pod="openshift-marketplace/redhat-marketplace-wn2wb" Oct 01 16:37:30 crc kubenswrapper[4869]: I1001 16:37:30.978551 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tqtrn\" (UniqueName: \"kubernetes.io/projected/b155ee91-d703-424e-9eb3-c9d7615d9bed-kube-api-access-tqtrn\") pod \"redhat-marketplace-wn2wb\" (UID: \"b155ee91-d703-424e-9eb3-c9d7615d9bed\") " pod="openshift-marketplace/redhat-marketplace-wn2wb" Oct 01 16:37:31 crc kubenswrapper[4869]: I1001 16:37:31.025536 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wn2wb" Oct 01 16:37:31 crc kubenswrapper[4869]: I1001 16:37:31.489702 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-wn2wb"] Oct 01 16:37:31 crc kubenswrapper[4869]: W1001 16:37:31.490153 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb155ee91_d703_424e_9eb3_c9d7615d9bed.slice/crio-05deb91ee6e9366eb9e2ab5d7402efa35a792853aa4f8188b7d510194051bb60 WatchSource:0}: Error finding container 05deb91ee6e9366eb9e2ab5d7402efa35a792853aa4f8188b7d510194051bb60: Status 404 returned error can't find the container with id 05deb91ee6e9366eb9e2ab5d7402efa35a792853aa4f8188b7d510194051bb60 Oct 01 16:37:32 crc kubenswrapper[4869]: I1001 16:37:32.175837 4869 generic.go:334] "Generic (PLEG): container finished" podID="b155ee91-d703-424e-9eb3-c9d7615d9bed" containerID="7baf26c2667420289889790e42a42146961205720c9678d64f2f9925308bc8f3" exitCode=0 Oct 01 16:37:32 crc kubenswrapper[4869]: I1001 16:37:32.176003 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wn2wb" event={"ID":"b155ee91-d703-424e-9eb3-c9d7615d9bed","Type":"ContainerDied","Data":"7baf26c2667420289889790e42a42146961205720c9678d64f2f9925308bc8f3"} Oct 01 16:37:32 crc kubenswrapper[4869]: I1001 16:37:32.176353 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wn2wb" event={"ID":"b155ee91-d703-424e-9eb3-c9d7615d9bed","Type":"ContainerStarted","Data":"05deb91ee6e9366eb9e2ab5d7402efa35a792853aa4f8188b7d510194051bb60"} Oct 01 16:37:34 crc kubenswrapper[4869]: I1001 16:37:34.193591 4869 generic.go:334] "Generic (PLEG): container finished" podID="b155ee91-d703-424e-9eb3-c9d7615d9bed" containerID="24d8e6045f1e3ff49ad75d168068cde4d5a4c6243528a5e8618c474bf63c830c" exitCode=0 Oct 01 16:37:34 crc 
kubenswrapper[4869]: I1001 16:37:34.193696 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wn2wb" event={"ID":"b155ee91-d703-424e-9eb3-c9d7615d9bed","Type":"ContainerDied","Data":"24d8e6045f1e3ff49ad75d168068cde4d5a4c6243528a5e8618c474bf63c830c"} Oct 01 16:37:34 crc kubenswrapper[4869]: I1001 16:37:34.646415 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-5nsjv" Oct 01 16:37:34 crc kubenswrapper[4869]: I1001 16:37:34.646802 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-5nsjv" Oct 01 16:37:34 crc kubenswrapper[4869]: I1001 16:37:34.705867 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-5nsjv" Oct 01 16:37:35 crc kubenswrapper[4869]: I1001 16:37:35.204349 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wn2wb" event={"ID":"b155ee91-d703-424e-9eb3-c9d7615d9bed","Type":"ContainerStarted","Data":"92a92905ce87f188ba239cd3d528cd6a85e92edf6f8a6f1ffbb31fbb982f5df9"} Oct 01 16:37:35 crc kubenswrapper[4869]: I1001 16:37:35.233920 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-wn2wb" podStartSLOduration=2.8395847659999998 podStartE2EDuration="5.233898349s" podCreationTimestamp="2025-10-01 16:37:30 +0000 UTC" firstStartedPulling="2025-10-01 16:37:32.179538675 +0000 UTC m=+5561.326381801" lastFinishedPulling="2025-10-01 16:37:34.573852268 +0000 UTC m=+5563.720695384" observedRunningTime="2025-10-01 16:37:35.233394056 +0000 UTC m=+5564.380237192" watchObservedRunningTime="2025-10-01 16:37:35.233898349 +0000 UTC m=+5564.380741465" Oct 01 16:37:35 crc kubenswrapper[4869]: I1001 16:37:35.256117 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-5nsjv" Oct 01 16:37:36 crc kubenswrapper[4869]: I1001 16:37:36.486474 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-5nsjv"] Oct 01 16:37:37 crc kubenswrapper[4869]: I1001 16:37:37.221811 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-5nsjv" podUID="30babdf3-c623-4d15-9d97-d7496b2c58e6" containerName="registry-server" containerID="cri-o://4ba1b7387b5fc9b881254bd71979a8faf64068ce9aa79ddba37bfe4d27ad002e" gracePeriod=2 Oct 01 16:37:37 crc kubenswrapper[4869]: I1001 16:37:37.924159 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-5nsjv" Oct 01 16:37:38 crc kubenswrapper[4869]: I1001 16:37:38.001878 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/30babdf3-c623-4d15-9d97-d7496b2c58e6-utilities\") pod \"30babdf3-c623-4d15-9d97-d7496b2c58e6\" (UID: \"30babdf3-c623-4d15-9d97-d7496b2c58e6\") " Oct 01 16:37:38 crc kubenswrapper[4869]: I1001 16:37:38.001932 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/30babdf3-c623-4d15-9d97-d7496b2c58e6-catalog-content\") pod \"30babdf3-c623-4d15-9d97-d7496b2c58e6\" (UID: \"30babdf3-c623-4d15-9d97-d7496b2c58e6\") " Oct 01 16:37:38 crc kubenswrapper[4869]: I1001 16:37:38.002025 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ms4jb\" (UniqueName: \"kubernetes.io/projected/30babdf3-c623-4d15-9d97-d7496b2c58e6-kube-api-access-ms4jb\") pod \"30babdf3-c623-4d15-9d97-d7496b2c58e6\" (UID: \"30babdf3-c623-4d15-9d97-d7496b2c58e6\") " Oct 01 16:37:38 crc kubenswrapper[4869]: I1001 16:37:38.003250 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/30babdf3-c623-4d15-9d97-d7496b2c58e6-utilities" (OuterVolumeSpecName: "utilities") pod "30babdf3-c623-4d15-9d97-d7496b2c58e6" (UID: "30babdf3-c623-4d15-9d97-d7496b2c58e6"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 16:37:38 crc kubenswrapper[4869]: I1001 16:37:38.012548 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/30babdf3-c623-4d15-9d97-d7496b2c58e6-kube-api-access-ms4jb" (OuterVolumeSpecName: "kube-api-access-ms4jb") pod "30babdf3-c623-4d15-9d97-d7496b2c58e6" (UID: "30babdf3-c623-4d15-9d97-d7496b2c58e6"). InnerVolumeSpecName "kube-api-access-ms4jb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 16:37:38 crc kubenswrapper[4869]: I1001 16:37:38.104213 4869 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/30babdf3-c623-4d15-9d97-d7496b2c58e6-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 16:37:38 crc kubenswrapper[4869]: I1001 16:37:38.104272 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ms4jb\" (UniqueName: \"kubernetes.io/projected/30babdf3-c623-4d15-9d97-d7496b2c58e6-kube-api-access-ms4jb\") on node \"crc\" DevicePath \"\"" Oct 01 16:37:38 crc kubenswrapper[4869]: I1001 16:37:38.186739 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/30babdf3-c623-4d15-9d97-d7496b2c58e6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "30babdf3-c623-4d15-9d97-d7496b2c58e6" (UID: "30babdf3-c623-4d15-9d97-d7496b2c58e6"). InnerVolumeSpecName "catalog-content". 
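
The kubenswrapper entries above all use the standard klog prefix: a severity letter, month/day, time, PID, source file:line, then a quoted message followed by key=value fields (for example I1001 16:37:38.001878 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started ..."). As a minimal sketch for filtering such lines, the Python snippet below splits one entry into those fields; the regex and field names are illustrative assumptions, not an official klog parser.

import re

# Illustrative klog-prefix parser; field names and regex are assumptions.
KLOG_RE = re.compile(
    r'(?P<severity>[IWEF])'               # I=info, W=warning, E=error, F=fatal
    r'(?P<month>\d{2})(?P<day>\d{2})\s+'
    r'(?P<time>\d{2}:\d{2}:\d{2}\.\d+)\s+'
    r'(?P<pid>\d+)\s+'
    r'(?P<source>[\w./-]+:\d+)\]\s+'
    r'(?P<rest>.*)'                       # quoted message plus key=value pairs
)

def parse_klog(line: str):
    """Return a dict of parsed fields, or None if the line does not match."""
    m = KLOG_RE.search(line)
    return m.groupdict() if m else None

example = ('I1001 16:37:38.001878 4869 reconciler_common.go:159] '
           '"operationExecutor.UnmountVolume started for volume \\"utilities\\"" '
           'pod="openshift-marketplace/certified-operators-5nsjv"')
print(parse_klog(example)["source"])      # -> reconciler_common.go:159
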
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 16:37:38 crc kubenswrapper[4869]: I1001 16:37:38.206707 4869 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/30babdf3-c623-4d15-9d97-d7496b2c58e6-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 16:37:38 crc kubenswrapper[4869]: I1001 16:37:38.236248 4869 generic.go:334] "Generic (PLEG): container finished" podID="30babdf3-c623-4d15-9d97-d7496b2c58e6" containerID="4ba1b7387b5fc9b881254bd71979a8faf64068ce9aa79ddba37bfe4d27ad002e" exitCode=0 Oct 01 16:37:38 crc kubenswrapper[4869]: I1001 16:37:38.236336 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5nsjv" event={"ID":"30babdf3-c623-4d15-9d97-d7496b2c58e6","Type":"ContainerDied","Data":"4ba1b7387b5fc9b881254bd71979a8faf64068ce9aa79ddba37bfe4d27ad002e"} Oct 01 16:37:38 crc kubenswrapper[4869]: I1001 16:37:38.236381 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5nsjv" event={"ID":"30babdf3-c623-4d15-9d97-d7496b2c58e6","Type":"ContainerDied","Data":"2e8bc9ed4c350ac7ea2fa53597d953c9cfe664005c43ee0add6462678c0b33aa"} Oct 01 16:37:38 crc kubenswrapper[4869]: I1001 16:37:38.236381 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5nsjv" Oct 01 16:37:38 crc kubenswrapper[4869]: I1001 16:37:38.236544 4869 scope.go:117] "RemoveContainer" containerID="4ba1b7387b5fc9b881254bd71979a8faf64068ce9aa79ddba37bfe4d27ad002e" Oct 01 16:37:38 crc kubenswrapper[4869]: I1001 16:37:38.271666 4869 scope.go:117] "RemoveContainer" containerID="053774cf162f006563b21520e7b720de49c9ebf2b1e11b113c354e82e5362dff" Oct 01 16:37:38 crc kubenswrapper[4869]: I1001 16:37:38.299756 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-5nsjv"] Oct 01 16:37:38 crc kubenswrapper[4869]: I1001 16:37:38.307348 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-5nsjv"] Oct 01 16:37:38 crc kubenswrapper[4869]: I1001 16:37:38.315062 4869 scope.go:117] "RemoveContainer" containerID="5a8e4cc0e1b2420fc4e77f8512178447f4cec2c2a97eab415bfb557013346d39" Oct 01 16:37:38 crc kubenswrapper[4869]: I1001 16:37:38.364220 4869 scope.go:117] "RemoveContainer" containerID="4ba1b7387b5fc9b881254bd71979a8faf64068ce9aa79ddba37bfe4d27ad002e" Oct 01 16:37:38 crc kubenswrapper[4869]: E1001 16:37:38.364819 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4ba1b7387b5fc9b881254bd71979a8faf64068ce9aa79ddba37bfe4d27ad002e\": container with ID starting with 4ba1b7387b5fc9b881254bd71979a8faf64068ce9aa79ddba37bfe4d27ad002e not found: ID does not exist" containerID="4ba1b7387b5fc9b881254bd71979a8faf64068ce9aa79ddba37bfe4d27ad002e" Oct 01 16:37:38 crc kubenswrapper[4869]: I1001 16:37:38.364983 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4ba1b7387b5fc9b881254bd71979a8faf64068ce9aa79ddba37bfe4d27ad002e"} err="failed to get container status \"4ba1b7387b5fc9b881254bd71979a8faf64068ce9aa79ddba37bfe4d27ad002e\": rpc error: code = NotFound desc = could not find container \"4ba1b7387b5fc9b881254bd71979a8faf64068ce9aa79ddba37bfe4d27ad002e\": container with ID starting with 4ba1b7387b5fc9b881254bd71979a8faf64068ce9aa79ddba37bfe4d27ad002e not found: ID does not exist" Oct 01 
16:37:38 crc kubenswrapper[4869]: I1001 16:37:38.365098 4869 scope.go:117] "RemoveContainer" containerID="053774cf162f006563b21520e7b720de49c9ebf2b1e11b113c354e82e5362dff" Oct 01 16:37:38 crc kubenswrapper[4869]: E1001 16:37:38.365955 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"053774cf162f006563b21520e7b720de49c9ebf2b1e11b113c354e82e5362dff\": container with ID starting with 053774cf162f006563b21520e7b720de49c9ebf2b1e11b113c354e82e5362dff not found: ID does not exist" containerID="053774cf162f006563b21520e7b720de49c9ebf2b1e11b113c354e82e5362dff" Oct 01 16:37:38 crc kubenswrapper[4869]: I1001 16:37:38.366034 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"053774cf162f006563b21520e7b720de49c9ebf2b1e11b113c354e82e5362dff"} err="failed to get container status \"053774cf162f006563b21520e7b720de49c9ebf2b1e11b113c354e82e5362dff\": rpc error: code = NotFound desc = could not find container \"053774cf162f006563b21520e7b720de49c9ebf2b1e11b113c354e82e5362dff\": container with ID starting with 053774cf162f006563b21520e7b720de49c9ebf2b1e11b113c354e82e5362dff not found: ID does not exist" Oct 01 16:37:38 crc kubenswrapper[4869]: I1001 16:37:38.366083 4869 scope.go:117] "RemoveContainer" containerID="5a8e4cc0e1b2420fc4e77f8512178447f4cec2c2a97eab415bfb557013346d39" Oct 01 16:37:38 crc kubenswrapper[4869]: E1001 16:37:38.366512 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5a8e4cc0e1b2420fc4e77f8512178447f4cec2c2a97eab415bfb557013346d39\": container with ID starting with 5a8e4cc0e1b2420fc4e77f8512178447f4cec2c2a97eab415bfb557013346d39 not found: ID does not exist" containerID="5a8e4cc0e1b2420fc4e77f8512178447f4cec2c2a97eab415bfb557013346d39" Oct 01 16:37:38 crc kubenswrapper[4869]: I1001 16:37:38.366560 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5a8e4cc0e1b2420fc4e77f8512178447f4cec2c2a97eab415bfb557013346d39"} err="failed to get container status \"5a8e4cc0e1b2420fc4e77f8512178447f4cec2c2a97eab415bfb557013346d39\": rpc error: code = NotFound desc = could not find container \"5a8e4cc0e1b2420fc4e77f8512178447f4cec2c2a97eab415bfb557013346d39\": container with ID starting with 5a8e4cc0e1b2420fc4e77f8512178447f4cec2c2a97eab415bfb557013346d39 not found: ID does not exist" Oct 01 16:37:39 crc kubenswrapper[4869]: I1001 16:37:39.596786 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="30babdf3-c623-4d15-9d97-d7496b2c58e6" path="/var/lib/kubelet/pods/30babdf3-c623-4d15-9d97-d7496b2c58e6/volumes" Oct 01 16:37:41 crc kubenswrapper[4869]: I1001 16:37:41.026082 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-wn2wb" Oct 01 16:37:41 crc kubenswrapper[4869]: I1001 16:37:41.026427 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-wn2wb" Oct 01 16:37:41 crc kubenswrapper[4869]: I1001 16:37:41.072943 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-wn2wb" Oct 01 16:37:41 crc kubenswrapper[4869]: I1001 16:37:41.336332 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-wn2wb" Oct 01 16:37:42 crc kubenswrapper[4869]: I1001 16:37:42.280936 4869 
kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-wn2wb"] Oct 01 16:37:43 crc kubenswrapper[4869]: I1001 16:37:43.280640 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-wn2wb" podUID="b155ee91-d703-424e-9eb3-c9d7615d9bed" containerName="registry-server" containerID="cri-o://92a92905ce87f188ba239cd3d528cd6a85e92edf6f8a6f1ffbb31fbb982f5df9" gracePeriod=2 Oct 01 16:37:43 crc kubenswrapper[4869]: I1001 16:37:43.354437 4869 patch_prober.go:28] interesting pod/machine-config-daemon-c86m8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 16:37:43 crc kubenswrapper[4869]: I1001 16:37:43.354488 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 16:37:43 crc kubenswrapper[4869]: I1001 16:37:43.828401 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wn2wb" Oct 01 16:37:43 crc kubenswrapper[4869]: I1001 16:37:43.933641 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tqtrn\" (UniqueName: \"kubernetes.io/projected/b155ee91-d703-424e-9eb3-c9d7615d9bed-kube-api-access-tqtrn\") pod \"b155ee91-d703-424e-9eb3-c9d7615d9bed\" (UID: \"b155ee91-d703-424e-9eb3-c9d7615d9bed\") " Oct 01 16:37:43 crc kubenswrapper[4869]: I1001 16:37:43.934056 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b155ee91-d703-424e-9eb3-c9d7615d9bed-utilities\") pod \"b155ee91-d703-424e-9eb3-c9d7615d9bed\" (UID: \"b155ee91-d703-424e-9eb3-c9d7615d9bed\") " Oct 01 16:37:43 crc kubenswrapper[4869]: I1001 16:37:43.934745 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b155ee91-d703-424e-9eb3-c9d7615d9bed-utilities" (OuterVolumeSpecName: "utilities") pod "b155ee91-d703-424e-9eb3-c9d7615d9bed" (UID: "b155ee91-d703-424e-9eb3-c9d7615d9bed"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 16:37:43 crc kubenswrapper[4869]: I1001 16:37:43.934960 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b155ee91-d703-424e-9eb3-c9d7615d9bed-catalog-content\") pod \"b155ee91-d703-424e-9eb3-c9d7615d9bed\" (UID: \"b155ee91-d703-424e-9eb3-c9d7615d9bed\") " Oct 01 16:37:43 crc kubenswrapper[4869]: I1001 16:37:43.939783 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b155ee91-d703-424e-9eb3-c9d7615d9bed-kube-api-access-tqtrn" (OuterVolumeSpecName: "kube-api-access-tqtrn") pod "b155ee91-d703-424e-9eb3-c9d7615d9bed" (UID: "b155ee91-d703-424e-9eb3-c9d7615d9bed"). InnerVolumeSpecName "kube-api-access-tqtrn". 
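
The Liveness probe entries above show the kubelet probing http://127.0.0.1:8798/health for machine-config-daemon and treating a connection-refused error as a probe failure, which later leads to the container being killed and restarted. The sketch below mimics that kind of HTTP liveness check in spirit only: the URL is taken from the log output, while the timeout and status handling are assumptions, and this is not the kubelet's prober code.

import urllib.error
import urllib.request

def http_liveness_check(url: str, timeout: float = 1.0) -> tuple[bool, str]:
    """Rough HTTP liveness check: success means an HTTP status below 400."""
    try:
        with urllib.request.urlopen(url, timeout=timeout) as resp:
            return resp.status < 400, f"HTTP {resp.status}"
    except urllib.error.URLError as exc:
        # e.g. "connection refused", as seen for machine-config-daemon above
        return False, f"Get {url}: {exc.reason}"

if __name__ == "__main__":
    healthy, output = http_liveness_check("http://127.0.0.1:8798/health")
    print("probeResult =", "success" if healthy else "failure", "| output =", output)
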
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 16:37:43 crc kubenswrapper[4869]: I1001 16:37:43.948217 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b155ee91-d703-424e-9eb3-c9d7615d9bed-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b155ee91-d703-424e-9eb3-c9d7615d9bed" (UID: "b155ee91-d703-424e-9eb3-c9d7615d9bed"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 16:37:43 crc kubenswrapper[4869]: I1001 16:37:43.949207 4869 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b155ee91-d703-424e-9eb3-c9d7615d9bed-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 16:37:43 crc kubenswrapper[4869]: I1001 16:37:43.949237 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tqtrn\" (UniqueName: \"kubernetes.io/projected/b155ee91-d703-424e-9eb3-c9d7615d9bed-kube-api-access-tqtrn\") on node \"crc\" DevicePath \"\"" Oct 01 16:37:43 crc kubenswrapper[4869]: I1001 16:37:43.949251 4869 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b155ee91-d703-424e-9eb3-c9d7615d9bed-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 16:37:44 crc kubenswrapper[4869]: I1001 16:37:44.292421 4869 generic.go:334] "Generic (PLEG): container finished" podID="b155ee91-d703-424e-9eb3-c9d7615d9bed" containerID="92a92905ce87f188ba239cd3d528cd6a85e92edf6f8a6f1ffbb31fbb982f5df9" exitCode=0 Oct 01 16:37:44 crc kubenswrapper[4869]: I1001 16:37:44.292458 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wn2wb" event={"ID":"b155ee91-d703-424e-9eb3-c9d7615d9bed","Type":"ContainerDied","Data":"92a92905ce87f188ba239cd3d528cd6a85e92edf6f8a6f1ffbb31fbb982f5df9"} Oct 01 16:37:44 crc kubenswrapper[4869]: I1001 16:37:44.292482 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wn2wb" event={"ID":"b155ee91-d703-424e-9eb3-c9d7615d9bed","Type":"ContainerDied","Data":"05deb91ee6e9366eb9e2ab5d7402efa35a792853aa4f8188b7d510194051bb60"} Oct 01 16:37:44 crc kubenswrapper[4869]: I1001 16:37:44.292498 4869 scope.go:117] "RemoveContainer" containerID="92a92905ce87f188ba239cd3d528cd6a85e92edf6f8a6f1ffbb31fbb982f5df9" Oct 01 16:37:44 crc kubenswrapper[4869]: I1001 16:37:44.292600 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wn2wb" Oct 01 16:37:44 crc kubenswrapper[4869]: I1001 16:37:44.326385 4869 scope.go:117] "RemoveContainer" containerID="24d8e6045f1e3ff49ad75d168068cde4d5a4c6243528a5e8618c474bf63c830c" Oct 01 16:37:44 crc kubenswrapper[4869]: I1001 16:37:44.330531 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-wn2wb"] Oct 01 16:37:44 crc kubenswrapper[4869]: I1001 16:37:44.341018 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-wn2wb"] Oct 01 16:37:44 crc kubenswrapper[4869]: I1001 16:37:44.359002 4869 scope.go:117] "RemoveContainer" containerID="7baf26c2667420289889790e42a42146961205720c9678d64f2f9925308bc8f3" Oct 01 16:37:44 crc kubenswrapper[4869]: I1001 16:37:44.401638 4869 scope.go:117] "RemoveContainer" containerID="92a92905ce87f188ba239cd3d528cd6a85e92edf6f8a6f1ffbb31fbb982f5df9" Oct 01 16:37:44 crc kubenswrapper[4869]: E1001 16:37:44.402893 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"92a92905ce87f188ba239cd3d528cd6a85e92edf6f8a6f1ffbb31fbb982f5df9\": container with ID starting with 92a92905ce87f188ba239cd3d528cd6a85e92edf6f8a6f1ffbb31fbb982f5df9 not found: ID does not exist" containerID="92a92905ce87f188ba239cd3d528cd6a85e92edf6f8a6f1ffbb31fbb982f5df9" Oct 01 16:37:44 crc kubenswrapper[4869]: I1001 16:37:44.402954 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"92a92905ce87f188ba239cd3d528cd6a85e92edf6f8a6f1ffbb31fbb982f5df9"} err="failed to get container status \"92a92905ce87f188ba239cd3d528cd6a85e92edf6f8a6f1ffbb31fbb982f5df9\": rpc error: code = NotFound desc = could not find container \"92a92905ce87f188ba239cd3d528cd6a85e92edf6f8a6f1ffbb31fbb982f5df9\": container with ID starting with 92a92905ce87f188ba239cd3d528cd6a85e92edf6f8a6f1ffbb31fbb982f5df9 not found: ID does not exist" Oct 01 16:37:44 crc kubenswrapper[4869]: I1001 16:37:44.402993 4869 scope.go:117] "RemoveContainer" containerID="24d8e6045f1e3ff49ad75d168068cde4d5a4c6243528a5e8618c474bf63c830c" Oct 01 16:37:44 crc kubenswrapper[4869]: E1001 16:37:44.403623 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"24d8e6045f1e3ff49ad75d168068cde4d5a4c6243528a5e8618c474bf63c830c\": container with ID starting with 24d8e6045f1e3ff49ad75d168068cde4d5a4c6243528a5e8618c474bf63c830c not found: ID does not exist" containerID="24d8e6045f1e3ff49ad75d168068cde4d5a4c6243528a5e8618c474bf63c830c" Oct 01 16:37:44 crc kubenswrapper[4869]: I1001 16:37:44.403899 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"24d8e6045f1e3ff49ad75d168068cde4d5a4c6243528a5e8618c474bf63c830c"} err="failed to get container status \"24d8e6045f1e3ff49ad75d168068cde4d5a4c6243528a5e8618c474bf63c830c\": rpc error: code = NotFound desc = could not find container \"24d8e6045f1e3ff49ad75d168068cde4d5a4c6243528a5e8618c474bf63c830c\": container with ID starting with 24d8e6045f1e3ff49ad75d168068cde4d5a4c6243528a5e8618c474bf63c830c not found: ID does not exist" Oct 01 16:37:44 crc kubenswrapper[4869]: I1001 16:37:44.403936 4869 scope.go:117] "RemoveContainer" containerID="7baf26c2667420289889790e42a42146961205720c9678d64f2f9925308bc8f3" Oct 01 16:37:44 crc kubenswrapper[4869]: E1001 16:37:44.405524 4869 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"7baf26c2667420289889790e42a42146961205720c9678d64f2f9925308bc8f3\": container with ID starting with 7baf26c2667420289889790e42a42146961205720c9678d64f2f9925308bc8f3 not found: ID does not exist" containerID="7baf26c2667420289889790e42a42146961205720c9678d64f2f9925308bc8f3" Oct 01 16:37:44 crc kubenswrapper[4869]: I1001 16:37:44.405565 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7baf26c2667420289889790e42a42146961205720c9678d64f2f9925308bc8f3"} err="failed to get container status \"7baf26c2667420289889790e42a42146961205720c9678d64f2f9925308bc8f3\": rpc error: code = NotFound desc = could not find container \"7baf26c2667420289889790e42a42146961205720c9678d64f2f9925308bc8f3\": container with ID starting with 7baf26c2667420289889790e42a42146961205720c9678d64f2f9925308bc8f3 not found: ID does not exist" Oct 01 16:37:45 crc kubenswrapper[4869]: I1001 16:37:45.600734 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b155ee91-d703-424e-9eb3-c9d7615d9bed" path="/var/lib/kubelet/pods/b155ee91-d703-424e-9eb3-c9d7615d9bed/volumes" Oct 01 16:38:13 crc kubenswrapper[4869]: I1001 16:38:13.354533 4869 patch_prober.go:28] interesting pod/machine-config-daemon-c86m8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 16:38:13 crc kubenswrapper[4869]: I1001 16:38:13.355120 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 16:38:43 crc kubenswrapper[4869]: I1001 16:38:43.353998 4869 patch_prober.go:28] interesting pod/machine-config-daemon-c86m8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 16:38:43 crc kubenswrapper[4869]: I1001 16:38:43.354639 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 16:38:43 crc kubenswrapper[4869]: I1001 16:38:43.354706 4869 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" Oct 01 16:38:43 crc kubenswrapper[4869]: I1001 16:38:43.355766 4869 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"7dd9c90d275bfe93ccea8e2e9638d51d1466814860669199622dcb3c3bbdd327"} pod="openshift-machine-config-operator/machine-config-daemon-c86m8" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 01 16:38:43 crc kubenswrapper[4869]: I1001 16:38:43.355870 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" 
podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" containerID="cri-o://7dd9c90d275bfe93ccea8e2e9638d51d1466814860669199622dcb3c3bbdd327" gracePeriod=600 Oct 01 16:38:43 crc kubenswrapper[4869]: E1001 16:38:43.479076 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:38:43 crc kubenswrapper[4869]: I1001 16:38:43.916307 4869 generic.go:334] "Generic (PLEG): container finished" podID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerID="7dd9c90d275bfe93ccea8e2e9638d51d1466814860669199622dcb3c3bbdd327" exitCode=0 Oct 01 16:38:43 crc kubenswrapper[4869]: I1001 16:38:43.916363 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" event={"ID":"a4b64f7f-0b03-4f47-965b-9fde048b735c","Type":"ContainerDied","Data":"7dd9c90d275bfe93ccea8e2e9638d51d1466814860669199622dcb3c3bbdd327"} Oct 01 16:38:43 crc kubenswrapper[4869]: I1001 16:38:43.916574 4869 scope.go:117] "RemoveContainer" containerID="c121385614507939bd4b0233bfac1b4b5096c8ffc9330ccf589f609e0b1c878a" Oct 01 16:38:43 crc kubenswrapper[4869]: I1001 16:38:43.917162 4869 scope.go:117] "RemoveContainer" containerID="7dd9c90d275bfe93ccea8e2e9638d51d1466814860669199622dcb3c3bbdd327" Oct 01 16:38:43 crc kubenswrapper[4869]: E1001 16:38:43.917395 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:38:57 crc kubenswrapper[4869]: I1001 16:38:57.582818 4869 scope.go:117] "RemoveContainer" containerID="7dd9c90d275bfe93ccea8e2e9638d51d1466814860669199622dcb3c3bbdd327" Oct 01 16:38:57 crc kubenswrapper[4869]: E1001 16:38:57.584038 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:39:10 crc kubenswrapper[4869]: I1001 16:39:10.581920 4869 scope.go:117] "RemoveContainer" containerID="7dd9c90d275bfe93ccea8e2e9638d51d1466814860669199622dcb3c3bbdd327" Oct 01 16:39:10 crc kubenswrapper[4869]: E1001 16:39:10.582851 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:39:25 crc kubenswrapper[4869]: I1001 16:39:25.581111 4869 scope.go:117] 
"RemoveContainer" containerID="7dd9c90d275bfe93ccea8e2e9638d51d1466814860669199622dcb3c3bbdd327" Oct 01 16:39:25 crc kubenswrapper[4869]: E1001 16:39:25.582174 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:39:39 crc kubenswrapper[4869]: I1001 16:39:39.580982 4869 scope.go:117] "RemoveContainer" containerID="7dd9c90d275bfe93ccea8e2e9638d51d1466814860669199622dcb3c3bbdd327" Oct 01 16:39:39 crc kubenswrapper[4869]: E1001 16:39:39.582031 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:39:51 crc kubenswrapper[4869]: I1001 16:39:51.588047 4869 scope.go:117] "RemoveContainer" containerID="7dd9c90d275bfe93ccea8e2e9638d51d1466814860669199622dcb3c3bbdd327" Oct 01 16:39:51 crc kubenswrapper[4869]: E1001 16:39:51.588991 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:40:04 crc kubenswrapper[4869]: I1001 16:40:04.581559 4869 scope.go:117] "RemoveContainer" containerID="7dd9c90d275bfe93ccea8e2e9638d51d1466814860669199622dcb3c3bbdd327" Oct 01 16:40:04 crc kubenswrapper[4869]: E1001 16:40:04.582748 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:40:17 crc kubenswrapper[4869]: I1001 16:40:17.585278 4869 scope.go:117] "RemoveContainer" containerID="7dd9c90d275bfe93ccea8e2e9638d51d1466814860669199622dcb3c3bbdd327" Oct 01 16:40:17 crc kubenswrapper[4869]: E1001 16:40:17.586206 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:40:31 crc kubenswrapper[4869]: I1001 16:40:31.598980 4869 scope.go:117] "RemoveContainer" containerID="7dd9c90d275bfe93ccea8e2e9638d51d1466814860669199622dcb3c3bbdd327" Oct 01 16:40:31 crc kubenswrapper[4869]: E1001 16:40:31.599621 4869 pod_workers.go:1301] "Error 
syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:40:46 crc kubenswrapper[4869]: I1001 16:40:46.581639 4869 scope.go:117] "RemoveContainer" containerID="7dd9c90d275bfe93ccea8e2e9638d51d1466814860669199622dcb3c3bbdd327" Oct 01 16:40:46 crc kubenswrapper[4869]: E1001 16:40:46.582458 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:40:59 crc kubenswrapper[4869]: I1001 16:40:59.580770 4869 scope.go:117] "RemoveContainer" containerID="7dd9c90d275bfe93ccea8e2e9638d51d1466814860669199622dcb3c3bbdd327" Oct 01 16:40:59 crc kubenswrapper[4869]: E1001 16:40:59.581763 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:41:04 crc kubenswrapper[4869]: I1001 16:41:04.655884 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-qw74c"] Oct 01 16:41:04 crc kubenswrapper[4869]: E1001 16:41:04.656915 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b155ee91-d703-424e-9eb3-c9d7615d9bed" containerName="extract-utilities" Oct 01 16:41:04 crc kubenswrapper[4869]: I1001 16:41:04.656931 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="b155ee91-d703-424e-9eb3-c9d7615d9bed" containerName="extract-utilities" Oct 01 16:41:04 crc kubenswrapper[4869]: E1001 16:41:04.656945 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b155ee91-d703-424e-9eb3-c9d7615d9bed" containerName="registry-server" Oct 01 16:41:04 crc kubenswrapper[4869]: I1001 16:41:04.656951 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="b155ee91-d703-424e-9eb3-c9d7615d9bed" containerName="registry-server" Oct 01 16:41:04 crc kubenswrapper[4869]: E1001 16:41:04.656963 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="30babdf3-c623-4d15-9d97-d7496b2c58e6" containerName="extract-utilities" Oct 01 16:41:04 crc kubenswrapper[4869]: I1001 16:41:04.656969 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="30babdf3-c623-4d15-9d97-d7496b2c58e6" containerName="extract-utilities" Oct 01 16:41:04 crc kubenswrapper[4869]: E1001 16:41:04.656995 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="30babdf3-c623-4d15-9d97-d7496b2c58e6" containerName="extract-content" Oct 01 16:41:04 crc kubenswrapper[4869]: I1001 16:41:04.657000 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="30babdf3-c623-4d15-9d97-d7496b2c58e6" containerName="extract-content" Oct 01 16:41:04 crc kubenswrapper[4869]: E1001 
16:41:04.657012 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b155ee91-d703-424e-9eb3-c9d7615d9bed" containerName="extract-content" Oct 01 16:41:04 crc kubenswrapper[4869]: I1001 16:41:04.657017 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="b155ee91-d703-424e-9eb3-c9d7615d9bed" containerName="extract-content" Oct 01 16:41:04 crc kubenswrapper[4869]: E1001 16:41:04.657025 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="30babdf3-c623-4d15-9d97-d7496b2c58e6" containerName="registry-server" Oct 01 16:41:04 crc kubenswrapper[4869]: I1001 16:41:04.657031 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="30babdf3-c623-4d15-9d97-d7496b2c58e6" containerName="registry-server" Oct 01 16:41:04 crc kubenswrapper[4869]: I1001 16:41:04.657220 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="30babdf3-c623-4d15-9d97-d7496b2c58e6" containerName="registry-server" Oct 01 16:41:04 crc kubenswrapper[4869]: I1001 16:41:04.657248 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="b155ee91-d703-424e-9eb3-c9d7615d9bed" containerName="registry-server" Oct 01 16:41:04 crc kubenswrapper[4869]: I1001 16:41:04.659070 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-qw74c" Oct 01 16:41:04 crc kubenswrapper[4869]: I1001 16:41:04.690692 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-qw74c"] Oct 01 16:41:04 crc kubenswrapper[4869]: I1001 16:41:04.804414 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b88rm\" (UniqueName: \"kubernetes.io/projected/e17b5dbf-bfdb-4980-9bc3-027f80602cb4-kube-api-access-b88rm\") pod \"community-operators-qw74c\" (UID: \"e17b5dbf-bfdb-4980-9bc3-027f80602cb4\") " pod="openshift-marketplace/community-operators-qw74c" Oct 01 16:41:04 crc kubenswrapper[4869]: I1001 16:41:04.804745 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e17b5dbf-bfdb-4980-9bc3-027f80602cb4-catalog-content\") pod \"community-operators-qw74c\" (UID: \"e17b5dbf-bfdb-4980-9bc3-027f80602cb4\") " pod="openshift-marketplace/community-operators-qw74c" Oct 01 16:41:04 crc kubenswrapper[4869]: I1001 16:41:04.804775 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e17b5dbf-bfdb-4980-9bc3-027f80602cb4-utilities\") pod \"community-operators-qw74c\" (UID: \"e17b5dbf-bfdb-4980-9bc3-027f80602cb4\") " pod="openshift-marketplace/community-operators-qw74c" Oct 01 16:41:04 crc kubenswrapper[4869]: I1001 16:41:04.906625 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e17b5dbf-bfdb-4980-9bc3-027f80602cb4-catalog-content\") pod \"community-operators-qw74c\" (UID: \"e17b5dbf-bfdb-4980-9bc3-027f80602cb4\") " pod="openshift-marketplace/community-operators-qw74c" Oct 01 16:41:04 crc kubenswrapper[4869]: I1001 16:41:04.906957 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e17b5dbf-bfdb-4980-9bc3-027f80602cb4-utilities\") pod \"community-operators-qw74c\" (UID: \"e17b5dbf-bfdb-4980-9bc3-027f80602cb4\") " pod="openshift-marketplace/community-operators-qw74c" Oct 
01 16:41:04 crc kubenswrapper[4869]: I1001 16:41:04.907409 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b88rm\" (UniqueName: \"kubernetes.io/projected/e17b5dbf-bfdb-4980-9bc3-027f80602cb4-kube-api-access-b88rm\") pod \"community-operators-qw74c\" (UID: \"e17b5dbf-bfdb-4980-9bc3-027f80602cb4\") " pod="openshift-marketplace/community-operators-qw74c" Oct 01 16:41:04 crc kubenswrapper[4869]: I1001 16:41:04.907585 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e17b5dbf-bfdb-4980-9bc3-027f80602cb4-utilities\") pod \"community-operators-qw74c\" (UID: \"e17b5dbf-bfdb-4980-9bc3-027f80602cb4\") " pod="openshift-marketplace/community-operators-qw74c" Oct 01 16:41:04 crc kubenswrapper[4869]: I1001 16:41:04.907917 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e17b5dbf-bfdb-4980-9bc3-027f80602cb4-catalog-content\") pod \"community-operators-qw74c\" (UID: \"e17b5dbf-bfdb-4980-9bc3-027f80602cb4\") " pod="openshift-marketplace/community-operators-qw74c" Oct 01 16:41:04 crc kubenswrapper[4869]: I1001 16:41:04.932753 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b88rm\" (UniqueName: \"kubernetes.io/projected/e17b5dbf-bfdb-4980-9bc3-027f80602cb4-kube-api-access-b88rm\") pod \"community-operators-qw74c\" (UID: \"e17b5dbf-bfdb-4980-9bc3-027f80602cb4\") " pod="openshift-marketplace/community-operators-qw74c" Oct 01 16:41:04 crc kubenswrapper[4869]: I1001 16:41:04.993827 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-qw74c" Oct 01 16:41:05 crc kubenswrapper[4869]: I1001 16:41:05.514407 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-qw74c"] Oct 01 16:41:06 crc kubenswrapper[4869]: I1001 16:41:06.340254 4869 generic.go:334] "Generic (PLEG): container finished" podID="e17b5dbf-bfdb-4980-9bc3-027f80602cb4" containerID="14ebaba96e8b9d808d0ee7cd4573887dd790243f1a7485bb4be4ea1534a9cb7b" exitCode=0 Oct 01 16:41:06 crc kubenswrapper[4869]: I1001 16:41:06.340589 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qw74c" event={"ID":"e17b5dbf-bfdb-4980-9bc3-027f80602cb4","Type":"ContainerDied","Data":"14ebaba96e8b9d808d0ee7cd4573887dd790243f1a7485bb4be4ea1534a9cb7b"} Oct 01 16:41:06 crc kubenswrapper[4869]: I1001 16:41:06.340620 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qw74c" event={"ID":"e17b5dbf-bfdb-4980-9bc3-027f80602cb4","Type":"ContainerStarted","Data":"7f1ab22a2b761b98b8264ab4cb16c33e9b364bb1c9d81097245be70156106d0f"} Oct 01 16:41:06 crc kubenswrapper[4869]: I1001 16:41:06.342709 4869 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 01 16:41:08 crc kubenswrapper[4869]: I1001 16:41:08.372238 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qw74c" event={"ID":"e17b5dbf-bfdb-4980-9bc3-027f80602cb4","Type":"ContainerStarted","Data":"1239dd43899b5367cb9eccf920391aba397c810f6d8294c89bb13effb8247975"} Oct 01 16:41:09 crc kubenswrapper[4869]: I1001 16:41:09.390344 4869 generic.go:334] "Generic (PLEG): container finished" podID="e17b5dbf-bfdb-4980-9bc3-027f80602cb4" 
containerID="1239dd43899b5367cb9eccf920391aba397c810f6d8294c89bb13effb8247975" exitCode=0 Oct 01 16:41:09 crc kubenswrapper[4869]: I1001 16:41:09.390411 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qw74c" event={"ID":"e17b5dbf-bfdb-4980-9bc3-027f80602cb4","Type":"ContainerDied","Data":"1239dd43899b5367cb9eccf920391aba397c810f6d8294c89bb13effb8247975"} Oct 01 16:41:10 crc kubenswrapper[4869]: I1001 16:41:10.404210 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qw74c" event={"ID":"e17b5dbf-bfdb-4980-9bc3-027f80602cb4","Type":"ContainerStarted","Data":"fddd2b7dc445bfeb85a9aa8c889c9a4901223d7b8225975afbc2c5b84517dcf1"} Oct 01 16:41:10 crc kubenswrapper[4869]: I1001 16:41:10.430461 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-qw74c" podStartSLOduration=2.913228505 podStartE2EDuration="6.430441597s" podCreationTimestamp="2025-10-01 16:41:04 +0000 UTC" firstStartedPulling="2025-10-01 16:41:06.342524484 +0000 UTC m=+5775.489367600" lastFinishedPulling="2025-10-01 16:41:09.859737566 +0000 UTC m=+5779.006580692" observedRunningTime="2025-10-01 16:41:10.423190994 +0000 UTC m=+5779.570034120" watchObservedRunningTime="2025-10-01 16:41:10.430441597 +0000 UTC m=+5779.577284713" Oct 01 16:41:10 crc kubenswrapper[4869]: I1001 16:41:10.581857 4869 scope.go:117] "RemoveContainer" containerID="7dd9c90d275bfe93ccea8e2e9638d51d1466814860669199622dcb3c3bbdd327" Oct 01 16:41:10 crc kubenswrapper[4869]: E1001 16:41:10.582340 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:41:14 crc kubenswrapper[4869]: I1001 16:41:14.994089 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-qw74c" Oct 01 16:41:14 crc kubenswrapper[4869]: I1001 16:41:14.994946 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-qw74c" Oct 01 16:41:15 crc kubenswrapper[4869]: I1001 16:41:15.072901 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-qw74c" Oct 01 16:41:15 crc kubenswrapper[4869]: I1001 16:41:15.513020 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-qw74c" Oct 01 16:41:15 crc kubenswrapper[4869]: I1001 16:41:15.567485 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-qw74c"] Oct 01 16:41:17 crc kubenswrapper[4869]: I1001 16:41:17.472019 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-qw74c" podUID="e17b5dbf-bfdb-4980-9bc3-027f80602cb4" containerName="registry-server" containerID="cri-o://fddd2b7dc445bfeb85a9aa8c889c9a4901223d7b8225975afbc2c5b84517dcf1" gracePeriod=2 Oct 01 16:41:18 crc kubenswrapper[4869]: I1001 16:41:18.029162 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-qw74c" Oct 01 16:41:18 crc kubenswrapper[4869]: I1001 16:41:18.105431 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b88rm\" (UniqueName: \"kubernetes.io/projected/e17b5dbf-bfdb-4980-9bc3-027f80602cb4-kube-api-access-b88rm\") pod \"e17b5dbf-bfdb-4980-9bc3-027f80602cb4\" (UID: \"e17b5dbf-bfdb-4980-9bc3-027f80602cb4\") " Oct 01 16:41:18 crc kubenswrapper[4869]: I1001 16:41:18.105525 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e17b5dbf-bfdb-4980-9bc3-027f80602cb4-catalog-content\") pod \"e17b5dbf-bfdb-4980-9bc3-027f80602cb4\" (UID: \"e17b5dbf-bfdb-4980-9bc3-027f80602cb4\") " Oct 01 16:41:18 crc kubenswrapper[4869]: I1001 16:41:18.105571 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e17b5dbf-bfdb-4980-9bc3-027f80602cb4-utilities\") pod \"e17b5dbf-bfdb-4980-9bc3-027f80602cb4\" (UID: \"e17b5dbf-bfdb-4980-9bc3-027f80602cb4\") " Oct 01 16:41:18 crc kubenswrapper[4869]: I1001 16:41:18.108316 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e17b5dbf-bfdb-4980-9bc3-027f80602cb4-utilities" (OuterVolumeSpecName: "utilities") pod "e17b5dbf-bfdb-4980-9bc3-027f80602cb4" (UID: "e17b5dbf-bfdb-4980-9bc3-027f80602cb4"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 16:41:18 crc kubenswrapper[4869]: I1001 16:41:18.113277 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e17b5dbf-bfdb-4980-9bc3-027f80602cb4-kube-api-access-b88rm" (OuterVolumeSpecName: "kube-api-access-b88rm") pod "e17b5dbf-bfdb-4980-9bc3-027f80602cb4" (UID: "e17b5dbf-bfdb-4980-9bc3-027f80602cb4"). InnerVolumeSpecName "kube-api-access-b88rm". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 16:41:18 crc kubenswrapper[4869]: I1001 16:41:18.176798 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e17b5dbf-bfdb-4980-9bc3-027f80602cb4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e17b5dbf-bfdb-4980-9bc3-027f80602cb4" (UID: "e17b5dbf-bfdb-4980-9bc3-027f80602cb4"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 16:41:18 crc kubenswrapper[4869]: I1001 16:41:18.208870 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b88rm\" (UniqueName: \"kubernetes.io/projected/e17b5dbf-bfdb-4980-9bc3-027f80602cb4-kube-api-access-b88rm\") on node \"crc\" DevicePath \"\"" Oct 01 16:41:18 crc kubenswrapper[4869]: I1001 16:41:18.208912 4869 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e17b5dbf-bfdb-4980-9bc3-027f80602cb4-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 16:41:18 crc kubenswrapper[4869]: I1001 16:41:18.208921 4869 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e17b5dbf-bfdb-4980-9bc3-027f80602cb4-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 16:41:18 crc kubenswrapper[4869]: I1001 16:41:18.483392 4869 generic.go:334] "Generic (PLEG): container finished" podID="e17b5dbf-bfdb-4980-9bc3-027f80602cb4" containerID="fddd2b7dc445bfeb85a9aa8c889c9a4901223d7b8225975afbc2c5b84517dcf1" exitCode=0 Oct 01 16:41:18 crc kubenswrapper[4869]: I1001 16:41:18.483475 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-qw74c" Oct 01 16:41:18 crc kubenswrapper[4869]: I1001 16:41:18.483493 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qw74c" event={"ID":"e17b5dbf-bfdb-4980-9bc3-027f80602cb4","Type":"ContainerDied","Data":"fddd2b7dc445bfeb85a9aa8c889c9a4901223d7b8225975afbc2c5b84517dcf1"} Oct 01 16:41:18 crc kubenswrapper[4869]: I1001 16:41:18.483956 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qw74c" event={"ID":"e17b5dbf-bfdb-4980-9bc3-027f80602cb4","Type":"ContainerDied","Data":"7f1ab22a2b761b98b8264ab4cb16c33e9b364bb1c9d81097245be70156106d0f"} Oct 01 16:41:18 crc kubenswrapper[4869]: I1001 16:41:18.483987 4869 scope.go:117] "RemoveContainer" containerID="fddd2b7dc445bfeb85a9aa8c889c9a4901223d7b8225975afbc2c5b84517dcf1" Oct 01 16:41:18 crc kubenswrapper[4869]: I1001 16:41:18.525159 4869 scope.go:117] "RemoveContainer" containerID="1239dd43899b5367cb9eccf920391aba397c810f6d8294c89bb13effb8247975" Oct 01 16:41:18 crc kubenswrapper[4869]: I1001 16:41:18.526366 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-qw74c"] Oct 01 16:41:18 crc kubenswrapper[4869]: I1001 16:41:18.544481 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-qw74c"] Oct 01 16:41:18 crc kubenswrapper[4869]: I1001 16:41:18.555308 4869 scope.go:117] "RemoveContainer" containerID="14ebaba96e8b9d808d0ee7cd4573887dd790243f1a7485bb4be4ea1534a9cb7b" Oct 01 16:41:18 crc kubenswrapper[4869]: I1001 16:41:18.602841 4869 scope.go:117] "RemoveContainer" containerID="fddd2b7dc445bfeb85a9aa8c889c9a4901223d7b8225975afbc2c5b84517dcf1" Oct 01 16:41:18 crc kubenswrapper[4869]: E1001 16:41:18.603440 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fddd2b7dc445bfeb85a9aa8c889c9a4901223d7b8225975afbc2c5b84517dcf1\": container with ID starting with fddd2b7dc445bfeb85a9aa8c889c9a4901223d7b8225975afbc2c5b84517dcf1 not found: ID does not exist" containerID="fddd2b7dc445bfeb85a9aa8c889c9a4901223d7b8225975afbc2c5b84517dcf1" Oct 01 16:41:18 crc kubenswrapper[4869]: I1001 16:41:18.603485 
4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fddd2b7dc445bfeb85a9aa8c889c9a4901223d7b8225975afbc2c5b84517dcf1"} err="failed to get container status \"fddd2b7dc445bfeb85a9aa8c889c9a4901223d7b8225975afbc2c5b84517dcf1\": rpc error: code = NotFound desc = could not find container \"fddd2b7dc445bfeb85a9aa8c889c9a4901223d7b8225975afbc2c5b84517dcf1\": container with ID starting with fddd2b7dc445bfeb85a9aa8c889c9a4901223d7b8225975afbc2c5b84517dcf1 not found: ID does not exist" Oct 01 16:41:18 crc kubenswrapper[4869]: I1001 16:41:18.603511 4869 scope.go:117] "RemoveContainer" containerID="1239dd43899b5367cb9eccf920391aba397c810f6d8294c89bb13effb8247975" Oct 01 16:41:18 crc kubenswrapper[4869]: E1001 16:41:18.603898 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1239dd43899b5367cb9eccf920391aba397c810f6d8294c89bb13effb8247975\": container with ID starting with 1239dd43899b5367cb9eccf920391aba397c810f6d8294c89bb13effb8247975 not found: ID does not exist" containerID="1239dd43899b5367cb9eccf920391aba397c810f6d8294c89bb13effb8247975" Oct 01 16:41:18 crc kubenswrapper[4869]: I1001 16:41:18.603942 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1239dd43899b5367cb9eccf920391aba397c810f6d8294c89bb13effb8247975"} err="failed to get container status \"1239dd43899b5367cb9eccf920391aba397c810f6d8294c89bb13effb8247975\": rpc error: code = NotFound desc = could not find container \"1239dd43899b5367cb9eccf920391aba397c810f6d8294c89bb13effb8247975\": container with ID starting with 1239dd43899b5367cb9eccf920391aba397c810f6d8294c89bb13effb8247975 not found: ID does not exist" Oct 01 16:41:18 crc kubenswrapper[4869]: I1001 16:41:18.603975 4869 scope.go:117] "RemoveContainer" containerID="14ebaba96e8b9d808d0ee7cd4573887dd790243f1a7485bb4be4ea1534a9cb7b" Oct 01 16:41:18 crc kubenswrapper[4869]: E1001 16:41:18.604443 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"14ebaba96e8b9d808d0ee7cd4573887dd790243f1a7485bb4be4ea1534a9cb7b\": container with ID starting with 14ebaba96e8b9d808d0ee7cd4573887dd790243f1a7485bb4be4ea1534a9cb7b not found: ID does not exist" containerID="14ebaba96e8b9d808d0ee7cd4573887dd790243f1a7485bb4be4ea1534a9cb7b" Oct 01 16:41:18 crc kubenswrapper[4869]: I1001 16:41:18.604467 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"14ebaba96e8b9d808d0ee7cd4573887dd790243f1a7485bb4be4ea1534a9cb7b"} err="failed to get container status \"14ebaba96e8b9d808d0ee7cd4573887dd790243f1a7485bb4be4ea1534a9cb7b\": rpc error: code = NotFound desc = could not find container \"14ebaba96e8b9d808d0ee7cd4573887dd790243f1a7485bb4be4ea1534a9cb7b\": container with ID starting with 14ebaba96e8b9d808d0ee7cd4573887dd790243f1a7485bb4be4ea1534a9cb7b not found: ID does not exist" Oct 01 16:41:19 crc kubenswrapper[4869]: I1001 16:41:19.591929 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e17b5dbf-bfdb-4980-9bc3-027f80602cb4" path="/var/lib/kubelet/pods/e17b5dbf-bfdb-4980-9bc3-027f80602cb4/volumes" Oct 01 16:41:25 crc kubenswrapper[4869]: I1001 16:41:25.581223 4869 scope.go:117] "RemoveContainer" containerID="7dd9c90d275bfe93ccea8e2e9638d51d1466814860669199622dcb3c3bbdd327" Oct 01 16:41:25 crc kubenswrapper[4869]: E1001 16:41:25.582126 4869 pod_workers.go:1301] "Error syncing pod, skipping" 
err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:41:36 crc kubenswrapper[4869]: I1001 16:41:36.580951 4869 scope.go:117] "RemoveContainer" containerID="7dd9c90d275bfe93ccea8e2e9638d51d1466814860669199622dcb3c3bbdd327" Oct 01 16:41:36 crc kubenswrapper[4869]: E1001 16:41:36.581609 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:41:48 crc kubenswrapper[4869]: I1001 16:41:48.581438 4869 scope.go:117] "RemoveContainer" containerID="7dd9c90d275bfe93ccea8e2e9638d51d1466814860669199622dcb3c3bbdd327" Oct 01 16:41:48 crc kubenswrapper[4869]: E1001 16:41:48.582169 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:42:00 crc kubenswrapper[4869]: I1001 16:42:00.582233 4869 scope.go:117] "RemoveContainer" containerID="7dd9c90d275bfe93ccea8e2e9638d51d1466814860669199622dcb3c3bbdd327" Oct 01 16:42:00 crc kubenswrapper[4869]: E1001 16:42:00.582961 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:42:11 crc kubenswrapper[4869]: I1001 16:42:11.593606 4869 scope.go:117] "RemoveContainer" containerID="7dd9c90d275bfe93ccea8e2e9638d51d1466814860669199622dcb3c3bbdd327" Oct 01 16:42:11 crc kubenswrapper[4869]: E1001 16:42:11.594601 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:42:23 crc kubenswrapper[4869]: I1001 16:42:23.582103 4869 scope.go:117] "RemoveContainer" containerID="7dd9c90d275bfe93ccea8e2e9638d51d1466814860669199622dcb3c3bbdd327" Oct 01 16:42:23 crc kubenswrapper[4869]: E1001 16:42:23.583044 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:42:36 crc kubenswrapper[4869]: I1001 16:42:36.581405 4869 scope.go:117] "RemoveContainer" containerID="7dd9c90d275bfe93ccea8e2e9638d51d1466814860669199622dcb3c3bbdd327" Oct 01 16:42:36 crc kubenswrapper[4869]: E1001 16:42:36.582131 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:42:51 crc kubenswrapper[4869]: I1001 16:42:51.596066 4869 scope.go:117] "RemoveContainer" containerID="7dd9c90d275bfe93ccea8e2e9638d51d1466814860669199622dcb3c3bbdd327" Oct 01 16:42:51 crc kubenswrapper[4869]: E1001 16:42:51.597093 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:43:05 crc kubenswrapper[4869]: I1001 16:43:05.581879 4869 scope.go:117] "RemoveContainer" containerID="7dd9c90d275bfe93ccea8e2e9638d51d1466814860669199622dcb3c3bbdd327" Oct 01 16:43:05 crc kubenswrapper[4869]: E1001 16:43:05.582772 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:43:18 crc kubenswrapper[4869]: I1001 16:43:18.582146 4869 scope.go:117] "RemoveContainer" containerID="7dd9c90d275bfe93ccea8e2e9638d51d1466814860669199622dcb3c3bbdd327" Oct 01 16:43:18 crc kubenswrapper[4869]: E1001 16:43:18.583336 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:43:31 crc kubenswrapper[4869]: I1001 16:43:31.588094 4869 scope.go:117] "RemoveContainer" containerID="7dd9c90d275bfe93ccea8e2e9638d51d1466814860669199622dcb3c3bbdd327" Oct 01 16:43:31 crc kubenswrapper[4869]: E1001 16:43:31.588939 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" 
podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:43:43 crc kubenswrapper[4869]: I1001 16:43:43.582619 4869 scope.go:117] "RemoveContainer" containerID="7dd9c90d275bfe93ccea8e2e9638d51d1466814860669199622dcb3c3bbdd327" Oct 01 16:43:43 crc kubenswrapper[4869]: I1001 16:43:43.882683 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" event={"ID":"a4b64f7f-0b03-4f47-965b-9fde048b735c","Type":"ContainerStarted","Data":"9215450ce030b33b2df0e062d278173daf0447c034a566b1a9acbc4e50d0060b"} Oct 01 16:45:00 crc kubenswrapper[4869]: I1001 16:45:00.209215 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29322285-9zcfs"] Oct 01 16:45:00 crc kubenswrapper[4869]: E1001 16:45:00.210376 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e17b5dbf-bfdb-4980-9bc3-027f80602cb4" containerName="extract-utilities" Oct 01 16:45:00 crc kubenswrapper[4869]: I1001 16:45:00.210395 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="e17b5dbf-bfdb-4980-9bc3-027f80602cb4" containerName="extract-utilities" Oct 01 16:45:00 crc kubenswrapper[4869]: E1001 16:45:00.210433 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e17b5dbf-bfdb-4980-9bc3-027f80602cb4" containerName="registry-server" Oct 01 16:45:00 crc kubenswrapper[4869]: I1001 16:45:00.210441 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="e17b5dbf-bfdb-4980-9bc3-027f80602cb4" containerName="registry-server" Oct 01 16:45:00 crc kubenswrapper[4869]: E1001 16:45:00.210457 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e17b5dbf-bfdb-4980-9bc3-027f80602cb4" containerName="extract-content" Oct 01 16:45:00 crc kubenswrapper[4869]: I1001 16:45:00.210465 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="e17b5dbf-bfdb-4980-9bc3-027f80602cb4" containerName="extract-content" Oct 01 16:45:00 crc kubenswrapper[4869]: I1001 16:45:00.210706 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="e17b5dbf-bfdb-4980-9bc3-027f80602cb4" containerName="registry-server" Oct 01 16:45:00 crc kubenswrapper[4869]: I1001 16:45:00.211562 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29322285-9zcfs" Oct 01 16:45:00 crc kubenswrapper[4869]: I1001 16:45:00.214427 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 01 16:45:00 crc kubenswrapper[4869]: I1001 16:45:00.214886 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 01 16:45:00 crc kubenswrapper[4869]: I1001 16:45:00.231988 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29322285-9zcfs"] Oct 01 16:45:00 crc kubenswrapper[4869]: I1001 16:45:00.390203 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/703d1662-e59c-42ef-8ec1-d7e23c01a189-secret-volume\") pod \"collect-profiles-29322285-9zcfs\" (UID: \"703d1662-e59c-42ef-8ec1-d7e23c01a189\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322285-9zcfs" Oct 01 16:45:00 crc kubenswrapper[4869]: I1001 16:45:00.390362 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/703d1662-e59c-42ef-8ec1-d7e23c01a189-config-volume\") pod \"collect-profiles-29322285-9zcfs\" (UID: \"703d1662-e59c-42ef-8ec1-d7e23c01a189\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322285-9zcfs" Oct 01 16:45:00 crc kubenswrapper[4869]: I1001 16:45:00.390394 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fglms\" (UniqueName: \"kubernetes.io/projected/703d1662-e59c-42ef-8ec1-d7e23c01a189-kube-api-access-fglms\") pod \"collect-profiles-29322285-9zcfs\" (UID: \"703d1662-e59c-42ef-8ec1-d7e23c01a189\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322285-9zcfs" Oct 01 16:45:00 crc kubenswrapper[4869]: I1001 16:45:00.493092 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/703d1662-e59c-42ef-8ec1-d7e23c01a189-secret-volume\") pod \"collect-profiles-29322285-9zcfs\" (UID: \"703d1662-e59c-42ef-8ec1-d7e23c01a189\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322285-9zcfs" Oct 01 16:45:00 crc kubenswrapper[4869]: I1001 16:45:00.493759 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/703d1662-e59c-42ef-8ec1-d7e23c01a189-config-volume\") pod \"collect-profiles-29322285-9zcfs\" (UID: \"703d1662-e59c-42ef-8ec1-d7e23c01a189\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322285-9zcfs" Oct 01 16:45:00 crc kubenswrapper[4869]: I1001 16:45:00.493822 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fglms\" (UniqueName: \"kubernetes.io/projected/703d1662-e59c-42ef-8ec1-d7e23c01a189-kube-api-access-fglms\") pod \"collect-profiles-29322285-9zcfs\" (UID: \"703d1662-e59c-42ef-8ec1-d7e23c01a189\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322285-9zcfs" Oct 01 16:45:00 crc kubenswrapper[4869]: I1001 16:45:00.495798 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/703d1662-e59c-42ef-8ec1-d7e23c01a189-config-volume\") pod 
\"collect-profiles-29322285-9zcfs\" (UID: \"703d1662-e59c-42ef-8ec1-d7e23c01a189\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322285-9zcfs" Oct 01 16:45:00 crc kubenswrapper[4869]: I1001 16:45:00.498457 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/703d1662-e59c-42ef-8ec1-d7e23c01a189-secret-volume\") pod \"collect-profiles-29322285-9zcfs\" (UID: \"703d1662-e59c-42ef-8ec1-d7e23c01a189\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322285-9zcfs" Oct 01 16:45:00 crc kubenswrapper[4869]: I1001 16:45:00.515601 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fglms\" (UniqueName: \"kubernetes.io/projected/703d1662-e59c-42ef-8ec1-d7e23c01a189-kube-api-access-fglms\") pod \"collect-profiles-29322285-9zcfs\" (UID: \"703d1662-e59c-42ef-8ec1-d7e23c01a189\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322285-9zcfs" Oct 01 16:45:00 crc kubenswrapper[4869]: I1001 16:45:00.538236 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29322285-9zcfs" Oct 01 16:45:01 crc kubenswrapper[4869]: I1001 16:45:01.007460 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29322285-9zcfs"] Oct 01 16:45:01 crc kubenswrapper[4869]: I1001 16:45:01.710209 4869 generic.go:334] "Generic (PLEG): container finished" podID="703d1662-e59c-42ef-8ec1-d7e23c01a189" containerID="1ad9b1fd939cbe74759e3376acf9bace48e71f9d1c3a318f5ce6b8c4c9ec6e3f" exitCode=0 Oct 01 16:45:01 crc kubenswrapper[4869]: I1001 16:45:01.710938 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29322285-9zcfs" event={"ID":"703d1662-e59c-42ef-8ec1-d7e23c01a189","Type":"ContainerDied","Data":"1ad9b1fd939cbe74759e3376acf9bace48e71f9d1c3a318f5ce6b8c4c9ec6e3f"} Oct 01 16:45:01 crc kubenswrapper[4869]: I1001 16:45:01.711382 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29322285-9zcfs" event={"ID":"703d1662-e59c-42ef-8ec1-d7e23c01a189","Type":"ContainerStarted","Data":"5c72834f125ecdfe38288646e67b4663b631352b71e09d0d4987e75d816ff8fa"} Oct 01 16:45:03 crc kubenswrapper[4869]: I1001 16:45:03.213097 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29322285-9zcfs" Oct 01 16:45:03 crc kubenswrapper[4869]: I1001 16:45:03.263704 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/703d1662-e59c-42ef-8ec1-d7e23c01a189-config-volume\") pod \"703d1662-e59c-42ef-8ec1-d7e23c01a189\" (UID: \"703d1662-e59c-42ef-8ec1-d7e23c01a189\") " Oct 01 16:45:03 crc kubenswrapper[4869]: I1001 16:45:03.263988 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fglms\" (UniqueName: \"kubernetes.io/projected/703d1662-e59c-42ef-8ec1-d7e23c01a189-kube-api-access-fglms\") pod \"703d1662-e59c-42ef-8ec1-d7e23c01a189\" (UID: \"703d1662-e59c-42ef-8ec1-d7e23c01a189\") " Oct 01 16:45:03 crc kubenswrapper[4869]: I1001 16:45:03.264068 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/703d1662-e59c-42ef-8ec1-d7e23c01a189-secret-volume\") pod \"703d1662-e59c-42ef-8ec1-d7e23c01a189\" (UID: \"703d1662-e59c-42ef-8ec1-d7e23c01a189\") " Oct 01 16:45:03 crc kubenswrapper[4869]: I1001 16:45:03.265024 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/703d1662-e59c-42ef-8ec1-d7e23c01a189-config-volume" (OuterVolumeSpecName: "config-volume") pod "703d1662-e59c-42ef-8ec1-d7e23c01a189" (UID: "703d1662-e59c-42ef-8ec1-d7e23c01a189"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 16:45:03 crc kubenswrapper[4869]: I1001 16:45:03.270094 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/703d1662-e59c-42ef-8ec1-d7e23c01a189-kube-api-access-fglms" (OuterVolumeSpecName: "kube-api-access-fglms") pod "703d1662-e59c-42ef-8ec1-d7e23c01a189" (UID: "703d1662-e59c-42ef-8ec1-d7e23c01a189"). InnerVolumeSpecName "kube-api-access-fglms". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 16:45:03 crc kubenswrapper[4869]: I1001 16:45:03.285311 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/703d1662-e59c-42ef-8ec1-d7e23c01a189-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "703d1662-e59c-42ef-8ec1-d7e23c01a189" (UID: "703d1662-e59c-42ef-8ec1-d7e23c01a189"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 16:45:03 crc kubenswrapper[4869]: I1001 16:45:03.366637 4869 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/703d1662-e59c-42ef-8ec1-d7e23c01a189-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 01 16:45:03 crc kubenswrapper[4869]: I1001 16:45:03.366681 4869 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/703d1662-e59c-42ef-8ec1-d7e23c01a189-config-volume\") on node \"crc\" DevicePath \"\"" Oct 01 16:45:03 crc kubenswrapper[4869]: I1001 16:45:03.366691 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fglms\" (UniqueName: \"kubernetes.io/projected/703d1662-e59c-42ef-8ec1-d7e23c01a189-kube-api-access-fglms\") on node \"crc\" DevicePath \"\"" Oct 01 16:45:03 crc kubenswrapper[4869]: I1001 16:45:03.731009 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29322285-9zcfs" event={"ID":"703d1662-e59c-42ef-8ec1-d7e23c01a189","Type":"ContainerDied","Data":"5c72834f125ecdfe38288646e67b4663b631352b71e09d0d4987e75d816ff8fa"} Oct 01 16:45:03 crc kubenswrapper[4869]: I1001 16:45:03.731045 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5c72834f125ecdfe38288646e67b4663b631352b71e09d0d4987e75d816ff8fa" Oct 01 16:45:03 crc kubenswrapper[4869]: I1001 16:45:03.731091 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29322285-9zcfs" Oct 01 16:45:04 crc kubenswrapper[4869]: I1001 16:45:04.298607 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29322240-s2nk4"] Oct 01 16:45:04 crc kubenswrapper[4869]: I1001 16:45:04.311274 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29322240-s2nk4"] Oct 01 16:45:05 crc kubenswrapper[4869]: I1001 16:45:05.592158 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9aae4936-0f51-48a4-8298-7583d486c6ee" path="/var/lib/kubelet/pods/9aae4936-0f51-48a4-8298-7583d486c6ee/volumes" Oct 01 16:45:18 crc kubenswrapper[4869]: I1001 16:45:18.300764 4869 scope.go:117] "RemoveContainer" containerID="cc1a2821d018f6fee189dff271dc1ed21ec61a9f8f3804fd2d83b5f1f246145f" Oct 01 16:45:43 crc kubenswrapper[4869]: I1001 16:45:43.353997 4869 patch_prober.go:28] interesting pod/machine-config-daemon-c86m8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 16:45:43 crc kubenswrapper[4869]: I1001 16:45:43.354483 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 16:46:05 crc kubenswrapper[4869]: I1001 16:46:05.180546 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-s67d2"] Oct 01 16:46:05 crc kubenswrapper[4869]: E1001 16:46:05.183960 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="703d1662-e59c-42ef-8ec1-d7e23c01a189" 
containerName="collect-profiles" Oct 01 16:46:05 crc kubenswrapper[4869]: I1001 16:46:05.183992 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="703d1662-e59c-42ef-8ec1-d7e23c01a189" containerName="collect-profiles" Oct 01 16:46:05 crc kubenswrapper[4869]: I1001 16:46:05.184261 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="703d1662-e59c-42ef-8ec1-d7e23c01a189" containerName="collect-profiles" Oct 01 16:46:05 crc kubenswrapper[4869]: I1001 16:46:05.186003 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-s67d2" Oct 01 16:46:05 crc kubenswrapper[4869]: I1001 16:46:05.201084 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-s67d2"] Oct 01 16:46:05 crc kubenswrapper[4869]: I1001 16:46:05.217240 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f074ee13-3e77-41d3-bc31-3f36806e16f0-utilities\") pod \"redhat-operators-s67d2\" (UID: \"f074ee13-3e77-41d3-bc31-3f36806e16f0\") " pod="openshift-marketplace/redhat-operators-s67d2" Oct 01 16:46:05 crc kubenswrapper[4869]: I1001 16:46:05.217378 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f074ee13-3e77-41d3-bc31-3f36806e16f0-catalog-content\") pod \"redhat-operators-s67d2\" (UID: \"f074ee13-3e77-41d3-bc31-3f36806e16f0\") " pod="openshift-marketplace/redhat-operators-s67d2" Oct 01 16:46:05 crc kubenswrapper[4869]: I1001 16:46:05.217536 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mv7rb\" (UniqueName: \"kubernetes.io/projected/f074ee13-3e77-41d3-bc31-3f36806e16f0-kube-api-access-mv7rb\") pod \"redhat-operators-s67d2\" (UID: \"f074ee13-3e77-41d3-bc31-3f36806e16f0\") " pod="openshift-marketplace/redhat-operators-s67d2" Oct 01 16:46:05 crc kubenswrapper[4869]: I1001 16:46:05.319183 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f074ee13-3e77-41d3-bc31-3f36806e16f0-catalog-content\") pod \"redhat-operators-s67d2\" (UID: \"f074ee13-3e77-41d3-bc31-3f36806e16f0\") " pod="openshift-marketplace/redhat-operators-s67d2" Oct 01 16:46:05 crc kubenswrapper[4869]: I1001 16:46:05.319488 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mv7rb\" (UniqueName: \"kubernetes.io/projected/f074ee13-3e77-41d3-bc31-3f36806e16f0-kube-api-access-mv7rb\") pod \"redhat-operators-s67d2\" (UID: \"f074ee13-3e77-41d3-bc31-3f36806e16f0\") " pod="openshift-marketplace/redhat-operators-s67d2" Oct 01 16:46:05 crc kubenswrapper[4869]: I1001 16:46:05.319839 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f074ee13-3e77-41d3-bc31-3f36806e16f0-catalog-content\") pod \"redhat-operators-s67d2\" (UID: \"f074ee13-3e77-41d3-bc31-3f36806e16f0\") " pod="openshift-marketplace/redhat-operators-s67d2" Oct 01 16:46:05 crc kubenswrapper[4869]: I1001 16:46:05.319840 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f074ee13-3e77-41d3-bc31-3f36806e16f0-utilities\") pod \"redhat-operators-s67d2\" (UID: \"f074ee13-3e77-41d3-bc31-3f36806e16f0\") " 
pod="openshift-marketplace/redhat-operators-s67d2" Oct 01 16:46:05 crc kubenswrapper[4869]: I1001 16:46:05.320197 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f074ee13-3e77-41d3-bc31-3f36806e16f0-utilities\") pod \"redhat-operators-s67d2\" (UID: \"f074ee13-3e77-41d3-bc31-3f36806e16f0\") " pod="openshift-marketplace/redhat-operators-s67d2" Oct 01 16:46:05 crc kubenswrapper[4869]: I1001 16:46:05.338860 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mv7rb\" (UniqueName: \"kubernetes.io/projected/f074ee13-3e77-41d3-bc31-3f36806e16f0-kube-api-access-mv7rb\") pod \"redhat-operators-s67d2\" (UID: \"f074ee13-3e77-41d3-bc31-3f36806e16f0\") " pod="openshift-marketplace/redhat-operators-s67d2" Oct 01 16:46:05 crc kubenswrapper[4869]: I1001 16:46:05.544504 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-s67d2" Oct 01 16:46:06 crc kubenswrapper[4869]: I1001 16:46:06.069230 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-s67d2"] Oct 01 16:46:06 crc kubenswrapper[4869]: I1001 16:46:06.356205 4869 generic.go:334] "Generic (PLEG): container finished" podID="f074ee13-3e77-41d3-bc31-3f36806e16f0" containerID="35902f3f330cd6553ac2526827df6f43059c1a52f18e92d2fd499969aa9157e4" exitCode=0 Oct 01 16:46:06 crc kubenswrapper[4869]: I1001 16:46:06.356533 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-s67d2" event={"ID":"f074ee13-3e77-41d3-bc31-3f36806e16f0","Type":"ContainerDied","Data":"35902f3f330cd6553ac2526827df6f43059c1a52f18e92d2fd499969aa9157e4"} Oct 01 16:46:06 crc kubenswrapper[4869]: I1001 16:46:06.356559 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-s67d2" event={"ID":"f074ee13-3e77-41d3-bc31-3f36806e16f0","Type":"ContainerStarted","Data":"e78aa28326b61427cab14ffa395d6b3d021756c2a631343d795566bf992b9b05"} Oct 01 16:46:06 crc kubenswrapper[4869]: I1001 16:46:06.359472 4869 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 01 16:46:08 crc kubenswrapper[4869]: I1001 16:46:08.381210 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-s67d2" event={"ID":"f074ee13-3e77-41d3-bc31-3f36806e16f0","Type":"ContainerStarted","Data":"a0476f20110c31a9b116de4907bc89402bb45e7eb74c2aae7e48a311fe6851ef"} Oct 01 16:46:09 crc kubenswrapper[4869]: I1001 16:46:09.392574 4869 generic.go:334] "Generic (PLEG): container finished" podID="f074ee13-3e77-41d3-bc31-3f36806e16f0" containerID="a0476f20110c31a9b116de4907bc89402bb45e7eb74c2aae7e48a311fe6851ef" exitCode=0 Oct 01 16:46:09 crc kubenswrapper[4869]: I1001 16:46:09.392892 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-s67d2" event={"ID":"f074ee13-3e77-41d3-bc31-3f36806e16f0","Type":"ContainerDied","Data":"a0476f20110c31a9b116de4907bc89402bb45e7eb74c2aae7e48a311fe6851ef"} Oct 01 16:46:10 crc kubenswrapper[4869]: I1001 16:46:10.406158 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-s67d2" event={"ID":"f074ee13-3e77-41d3-bc31-3f36806e16f0","Type":"ContainerStarted","Data":"501fbc81fbeab211237ed5a753ba769cab49c9db71c34f4ad6774f90d8dd24b8"} Oct 01 16:46:10 crc kubenswrapper[4869]: I1001 16:46:10.427386 4869 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-s67d2" podStartSLOduration=1.8977420569999999 podStartE2EDuration="5.427369345s" podCreationTimestamp="2025-10-01 16:46:05 +0000 UTC" firstStartedPulling="2025-10-01 16:46:06.359156415 +0000 UTC m=+6075.505999541" lastFinishedPulling="2025-10-01 16:46:09.888783713 +0000 UTC m=+6079.035626829" observedRunningTime="2025-10-01 16:46:10.424885992 +0000 UTC m=+6079.571729108" watchObservedRunningTime="2025-10-01 16:46:10.427369345 +0000 UTC m=+6079.574212461" Oct 01 16:46:13 crc kubenswrapper[4869]: I1001 16:46:13.354172 4869 patch_prober.go:28] interesting pod/machine-config-daemon-c86m8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 16:46:13 crc kubenswrapper[4869]: I1001 16:46:13.354752 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 16:46:15 crc kubenswrapper[4869]: I1001 16:46:15.545717 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-s67d2" Oct 01 16:46:15 crc kubenswrapper[4869]: I1001 16:46:15.545799 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-s67d2" Oct 01 16:46:15 crc kubenswrapper[4869]: I1001 16:46:15.616922 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-s67d2" Oct 01 16:46:16 crc kubenswrapper[4869]: I1001 16:46:16.507984 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-s67d2" Oct 01 16:46:16 crc kubenswrapper[4869]: I1001 16:46:16.560397 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-s67d2"] Oct 01 16:46:18 crc kubenswrapper[4869]: I1001 16:46:18.484535 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-s67d2" podUID="f074ee13-3e77-41d3-bc31-3f36806e16f0" containerName="registry-server" containerID="cri-o://501fbc81fbeab211237ed5a753ba769cab49c9db71c34f4ad6774f90d8dd24b8" gracePeriod=2 Oct 01 16:46:19 crc kubenswrapper[4869]: I1001 16:46:19.495967 4869 generic.go:334] "Generic (PLEG): container finished" podID="f074ee13-3e77-41d3-bc31-3f36806e16f0" containerID="501fbc81fbeab211237ed5a753ba769cab49c9db71c34f4ad6774f90d8dd24b8" exitCode=0 Oct 01 16:46:19 crc kubenswrapper[4869]: I1001 16:46:19.496090 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-s67d2" event={"ID":"f074ee13-3e77-41d3-bc31-3f36806e16f0","Type":"ContainerDied","Data":"501fbc81fbeab211237ed5a753ba769cab49c9db71c34f4ad6774f90d8dd24b8"} Oct 01 16:46:19 crc kubenswrapper[4869]: I1001 16:46:19.496748 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-s67d2" event={"ID":"f074ee13-3e77-41d3-bc31-3f36806e16f0","Type":"ContainerDied","Data":"e78aa28326b61427cab14ffa395d6b3d021756c2a631343d795566bf992b9b05"} Oct 01 16:46:19 crc kubenswrapper[4869]: I1001 
16:46:19.496765 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e78aa28326b61427cab14ffa395d6b3d021756c2a631343d795566bf992b9b05" Oct 01 16:46:19 crc kubenswrapper[4869]: I1001 16:46:19.517708 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-s67d2" Oct 01 16:46:19 crc kubenswrapper[4869]: I1001 16:46:19.609651 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f074ee13-3e77-41d3-bc31-3f36806e16f0-utilities\") pod \"f074ee13-3e77-41d3-bc31-3f36806e16f0\" (UID: \"f074ee13-3e77-41d3-bc31-3f36806e16f0\") " Oct 01 16:46:19 crc kubenswrapper[4869]: I1001 16:46:19.609802 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f074ee13-3e77-41d3-bc31-3f36806e16f0-catalog-content\") pod \"f074ee13-3e77-41d3-bc31-3f36806e16f0\" (UID: \"f074ee13-3e77-41d3-bc31-3f36806e16f0\") " Oct 01 16:46:19 crc kubenswrapper[4869]: I1001 16:46:19.609834 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mv7rb\" (UniqueName: \"kubernetes.io/projected/f074ee13-3e77-41d3-bc31-3f36806e16f0-kube-api-access-mv7rb\") pod \"f074ee13-3e77-41d3-bc31-3f36806e16f0\" (UID: \"f074ee13-3e77-41d3-bc31-3f36806e16f0\") " Oct 01 16:46:19 crc kubenswrapper[4869]: I1001 16:46:19.610632 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f074ee13-3e77-41d3-bc31-3f36806e16f0-utilities" (OuterVolumeSpecName: "utilities") pod "f074ee13-3e77-41d3-bc31-3f36806e16f0" (UID: "f074ee13-3e77-41d3-bc31-3f36806e16f0"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 16:46:19 crc kubenswrapper[4869]: I1001 16:46:19.615170 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f074ee13-3e77-41d3-bc31-3f36806e16f0-kube-api-access-mv7rb" (OuterVolumeSpecName: "kube-api-access-mv7rb") pod "f074ee13-3e77-41d3-bc31-3f36806e16f0" (UID: "f074ee13-3e77-41d3-bc31-3f36806e16f0"). InnerVolumeSpecName "kube-api-access-mv7rb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 16:46:19 crc kubenswrapper[4869]: I1001 16:46:19.695670 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f074ee13-3e77-41d3-bc31-3f36806e16f0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f074ee13-3e77-41d3-bc31-3f36806e16f0" (UID: "f074ee13-3e77-41d3-bc31-3f36806e16f0"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 16:46:19 crc kubenswrapper[4869]: I1001 16:46:19.712649 4869 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f074ee13-3e77-41d3-bc31-3f36806e16f0-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 16:46:19 crc kubenswrapper[4869]: I1001 16:46:19.712686 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mv7rb\" (UniqueName: \"kubernetes.io/projected/f074ee13-3e77-41d3-bc31-3f36806e16f0-kube-api-access-mv7rb\") on node \"crc\" DevicePath \"\"" Oct 01 16:46:19 crc kubenswrapper[4869]: I1001 16:46:19.712697 4869 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f074ee13-3e77-41d3-bc31-3f36806e16f0-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 16:46:20 crc kubenswrapper[4869]: I1001 16:46:20.504846 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-s67d2" Oct 01 16:46:20 crc kubenswrapper[4869]: I1001 16:46:20.543830 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-s67d2"] Oct 01 16:46:20 crc kubenswrapper[4869]: I1001 16:46:20.557339 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-s67d2"] Oct 01 16:46:21 crc kubenswrapper[4869]: I1001 16:46:21.594379 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f074ee13-3e77-41d3-bc31-3f36806e16f0" path="/var/lib/kubelet/pods/f074ee13-3e77-41d3-bc31-3f36806e16f0/volumes" Oct 01 16:46:43 crc kubenswrapper[4869]: I1001 16:46:43.354027 4869 patch_prober.go:28] interesting pod/machine-config-daemon-c86m8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 16:46:43 crc kubenswrapper[4869]: I1001 16:46:43.355987 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 16:46:43 crc kubenswrapper[4869]: I1001 16:46:43.356051 4869 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" Oct 01 16:46:43 crc kubenswrapper[4869]: I1001 16:46:43.357087 4869 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"9215450ce030b33b2df0e062d278173daf0447c034a566b1a9acbc4e50d0060b"} pod="openshift-machine-config-operator/machine-config-daemon-c86m8" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 01 16:46:43 crc kubenswrapper[4869]: I1001 16:46:43.358437 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" containerID="cri-o://9215450ce030b33b2df0e062d278173daf0447c034a566b1a9acbc4e50d0060b" gracePeriod=600 Oct 01 16:46:43 crc kubenswrapper[4869]: I1001 16:46:43.738986 4869 generic.go:334] "Generic (PLEG): container finished" 
podID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerID="9215450ce030b33b2df0e062d278173daf0447c034a566b1a9acbc4e50d0060b" exitCode=0 Oct 01 16:46:43 crc kubenswrapper[4869]: I1001 16:46:43.739160 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" event={"ID":"a4b64f7f-0b03-4f47-965b-9fde048b735c","Type":"ContainerDied","Data":"9215450ce030b33b2df0e062d278173daf0447c034a566b1a9acbc4e50d0060b"} Oct 01 16:46:43 crc kubenswrapper[4869]: I1001 16:46:43.739580 4869 scope.go:117] "RemoveContainer" containerID="7dd9c90d275bfe93ccea8e2e9638d51d1466814860669199622dcb3c3bbdd327" Oct 01 16:46:44 crc kubenswrapper[4869]: I1001 16:46:44.752459 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" event={"ID":"a4b64f7f-0b03-4f47-965b-9fde048b735c","Type":"ContainerStarted","Data":"d9281499446ba735acc8b48aef748c3e2b86ec9baa190cbca8c27b3fd0bb1351"} Oct 01 16:48:43 crc kubenswrapper[4869]: I1001 16:48:43.354527 4869 patch_prober.go:28] interesting pod/machine-config-daemon-c86m8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 16:48:43 crc kubenswrapper[4869]: I1001 16:48:43.355133 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 16:49:13 crc kubenswrapper[4869]: I1001 16:49:13.353769 4869 patch_prober.go:28] interesting pod/machine-config-daemon-c86m8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 16:49:13 crc kubenswrapper[4869]: I1001 16:49:13.354296 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 16:49:43 crc kubenswrapper[4869]: I1001 16:49:43.353970 4869 patch_prober.go:28] interesting pod/machine-config-daemon-c86m8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 16:49:43 crc kubenswrapper[4869]: I1001 16:49:43.354667 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 16:49:43 crc kubenswrapper[4869]: I1001 16:49:43.354736 4869 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" Oct 01 16:49:43 crc kubenswrapper[4869]: I1001 16:49:43.355934 4869 kuberuntime_manager.go:1027] "Message for 
Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"d9281499446ba735acc8b48aef748c3e2b86ec9baa190cbca8c27b3fd0bb1351"} pod="openshift-machine-config-operator/machine-config-daemon-c86m8" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 01 16:49:43 crc kubenswrapper[4869]: I1001 16:49:43.356042 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" containerID="cri-o://d9281499446ba735acc8b48aef748c3e2b86ec9baa190cbca8c27b3fd0bb1351" gracePeriod=600 Oct 01 16:49:43 crc kubenswrapper[4869]: E1001 16:49:43.491989 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:49:43 crc kubenswrapper[4869]: I1001 16:49:43.547578 4869 generic.go:334] "Generic (PLEG): container finished" podID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerID="d9281499446ba735acc8b48aef748c3e2b86ec9baa190cbca8c27b3fd0bb1351" exitCode=0 Oct 01 16:49:43 crc kubenswrapper[4869]: I1001 16:49:43.547629 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" event={"ID":"a4b64f7f-0b03-4f47-965b-9fde048b735c","Type":"ContainerDied","Data":"d9281499446ba735acc8b48aef748c3e2b86ec9baa190cbca8c27b3fd0bb1351"} Oct 01 16:49:43 crc kubenswrapper[4869]: I1001 16:49:43.547665 4869 scope.go:117] "RemoveContainer" containerID="9215450ce030b33b2df0e062d278173daf0447c034a566b1a9acbc4e50d0060b" Oct 01 16:49:43 crc kubenswrapper[4869]: I1001 16:49:43.548573 4869 scope.go:117] "RemoveContainer" containerID="d9281499446ba735acc8b48aef748c3e2b86ec9baa190cbca8c27b3fd0bb1351" Oct 01 16:49:43 crc kubenswrapper[4869]: E1001 16:49:43.549030 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:49:55 crc kubenswrapper[4869]: I1001 16:49:55.581680 4869 scope.go:117] "RemoveContainer" containerID="d9281499446ba735acc8b48aef748c3e2b86ec9baa190cbca8c27b3fd0bb1351" Oct 01 16:49:55 crc kubenswrapper[4869]: E1001 16:49:55.582785 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:50:07 crc kubenswrapper[4869]: I1001 16:50:07.580961 4869 scope.go:117] "RemoveContainer" containerID="d9281499446ba735acc8b48aef748c3e2b86ec9baa190cbca8c27b3fd0bb1351" Oct 01 16:50:07 crc kubenswrapper[4869]: E1001 
16:50:07.583445 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:50:20 crc kubenswrapper[4869]: I1001 16:50:20.582071 4869 scope.go:117] "RemoveContainer" containerID="d9281499446ba735acc8b48aef748c3e2b86ec9baa190cbca8c27b3fd0bb1351" Oct 01 16:50:20 crc kubenswrapper[4869]: E1001 16:50:20.583051 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:50:33 crc kubenswrapper[4869]: I1001 16:50:33.580898 4869 scope.go:117] "RemoveContainer" containerID="d9281499446ba735acc8b48aef748c3e2b86ec9baa190cbca8c27b3fd0bb1351" Oct 01 16:50:33 crc kubenswrapper[4869]: E1001 16:50:33.581692 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:50:40 crc kubenswrapper[4869]: I1001 16:50:40.904297 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-ffg5l"] Oct 01 16:50:40 crc kubenswrapper[4869]: E1001 16:50:40.905510 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f074ee13-3e77-41d3-bc31-3f36806e16f0" containerName="registry-server" Oct 01 16:50:40 crc kubenswrapper[4869]: I1001 16:50:40.905532 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="f074ee13-3e77-41d3-bc31-3f36806e16f0" containerName="registry-server" Oct 01 16:50:40 crc kubenswrapper[4869]: E1001 16:50:40.905567 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f074ee13-3e77-41d3-bc31-3f36806e16f0" containerName="extract-utilities" Oct 01 16:50:40 crc kubenswrapper[4869]: I1001 16:50:40.905582 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="f074ee13-3e77-41d3-bc31-3f36806e16f0" containerName="extract-utilities" Oct 01 16:50:40 crc kubenswrapper[4869]: E1001 16:50:40.905615 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f074ee13-3e77-41d3-bc31-3f36806e16f0" containerName="extract-content" Oct 01 16:50:40 crc kubenswrapper[4869]: I1001 16:50:40.905629 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="f074ee13-3e77-41d3-bc31-3f36806e16f0" containerName="extract-content" Oct 01 16:50:40 crc kubenswrapper[4869]: I1001 16:50:40.905967 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="f074ee13-3e77-41d3-bc31-3f36806e16f0" containerName="registry-server" Oct 01 16:50:40 crc kubenswrapper[4869]: I1001 16:50:40.908643 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-ffg5l" Oct 01 16:50:40 crc kubenswrapper[4869]: I1001 16:50:40.926821 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-ffg5l"] Oct 01 16:50:41 crc kubenswrapper[4869]: I1001 16:50:41.046961 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/306a7da6-413c-488c-b4da-5e00b113ac4b-catalog-content\") pod \"certified-operators-ffg5l\" (UID: \"306a7da6-413c-488c-b4da-5e00b113ac4b\") " pod="openshift-marketplace/certified-operators-ffg5l" Oct 01 16:50:41 crc kubenswrapper[4869]: I1001 16:50:41.047050 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bz47g\" (UniqueName: \"kubernetes.io/projected/306a7da6-413c-488c-b4da-5e00b113ac4b-kube-api-access-bz47g\") pod \"certified-operators-ffg5l\" (UID: \"306a7da6-413c-488c-b4da-5e00b113ac4b\") " pod="openshift-marketplace/certified-operators-ffg5l" Oct 01 16:50:41 crc kubenswrapper[4869]: I1001 16:50:41.047089 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/306a7da6-413c-488c-b4da-5e00b113ac4b-utilities\") pod \"certified-operators-ffg5l\" (UID: \"306a7da6-413c-488c-b4da-5e00b113ac4b\") " pod="openshift-marketplace/certified-operators-ffg5l" Oct 01 16:50:41 crc kubenswrapper[4869]: I1001 16:50:41.149555 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/306a7da6-413c-488c-b4da-5e00b113ac4b-catalog-content\") pod \"certified-operators-ffg5l\" (UID: \"306a7da6-413c-488c-b4da-5e00b113ac4b\") " pod="openshift-marketplace/certified-operators-ffg5l" Oct 01 16:50:41 crc kubenswrapper[4869]: I1001 16:50:41.149645 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bz47g\" (UniqueName: \"kubernetes.io/projected/306a7da6-413c-488c-b4da-5e00b113ac4b-kube-api-access-bz47g\") pod \"certified-operators-ffg5l\" (UID: \"306a7da6-413c-488c-b4da-5e00b113ac4b\") " pod="openshift-marketplace/certified-operators-ffg5l" Oct 01 16:50:41 crc kubenswrapper[4869]: I1001 16:50:41.149684 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/306a7da6-413c-488c-b4da-5e00b113ac4b-utilities\") pod \"certified-operators-ffg5l\" (UID: \"306a7da6-413c-488c-b4da-5e00b113ac4b\") " pod="openshift-marketplace/certified-operators-ffg5l" Oct 01 16:50:41 crc kubenswrapper[4869]: I1001 16:50:41.150076 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/306a7da6-413c-488c-b4da-5e00b113ac4b-catalog-content\") pod \"certified-operators-ffg5l\" (UID: \"306a7da6-413c-488c-b4da-5e00b113ac4b\") " pod="openshift-marketplace/certified-operators-ffg5l" Oct 01 16:50:41 crc kubenswrapper[4869]: I1001 16:50:41.150190 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/306a7da6-413c-488c-b4da-5e00b113ac4b-utilities\") pod \"certified-operators-ffg5l\" (UID: \"306a7da6-413c-488c-b4da-5e00b113ac4b\") " pod="openshift-marketplace/certified-operators-ffg5l" Oct 01 16:50:41 crc kubenswrapper[4869]: I1001 16:50:41.175016 4869 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-bz47g\" (UniqueName: \"kubernetes.io/projected/306a7da6-413c-488c-b4da-5e00b113ac4b-kube-api-access-bz47g\") pod \"certified-operators-ffg5l\" (UID: \"306a7da6-413c-488c-b4da-5e00b113ac4b\") " pod="openshift-marketplace/certified-operators-ffg5l" Oct 01 16:50:41 crc kubenswrapper[4869]: I1001 16:50:41.232436 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-ffg5l" Oct 01 16:50:41 crc kubenswrapper[4869]: I1001 16:50:41.715819 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-ffg5l"] Oct 01 16:50:42 crc kubenswrapper[4869]: I1001 16:50:42.149380 4869 generic.go:334] "Generic (PLEG): container finished" podID="306a7da6-413c-488c-b4da-5e00b113ac4b" containerID="8d514099a92d9586b24cc1acec035b5e8eb2a7bc955253a883a9c3a9e6766934" exitCode=0 Oct 01 16:50:42 crc kubenswrapper[4869]: I1001 16:50:42.149457 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ffg5l" event={"ID":"306a7da6-413c-488c-b4da-5e00b113ac4b","Type":"ContainerDied","Data":"8d514099a92d9586b24cc1acec035b5e8eb2a7bc955253a883a9c3a9e6766934"} Oct 01 16:50:42 crc kubenswrapper[4869]: I1001 16:50:42.149516 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ffg5l" event={"ID":"306a7da6-413c-488c-b4da-5e00b113ac4b","Type":"ContainerStarted","Data":"d49291849f82e7e638064ffaae3b7a4de73db07875180d0d24141bb8acdb63a5"} Oct 01 16:50:43 crc kubenswrapper[4869]: I1001 16:50:43.159254 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ffg5l" event={"ID":"306a7da6-413c-488c-b4da-5e00b113ac4b","Type":"ContainerStarted","Data":"10c5e2f61ddb9b5689d32bf0a14b46ed3d365b169aaf3e236db308108c878a38"} Oct 01 16:50:43 crc kubenswrapper[4869]: E1001 16:50:43.467851 4869 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod306a7da6_413c_488c_b4da_5e00b113ac4b.slice/crio-10c5e2f61ddb9b5689d32bf0a14b46ed3d365b169aaf3e236db308108c878a38.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod306a7da6_413c_488c_b4da_5e00b113ac4b.slice/crio-conmon-10c5e2f61ddb9b5689d32bf0a14b46ed3d365b169aaf3e236db308108c878a38.scope\": RecentStats: unable to find data in memory cache]" Oct 01 16:50:44 crc kubenswrapper[4869]: I1001 16:50:44.169425 4869 generic.go:334] "Generic (PLEG): container finished" podID="306a7da6-413c-488c-b4da-5e00b113ac4b" containerID="10c5e2f61ddb9b5689d32bf0a14b46ed3d365b169aaf3e236db308108c878a38" exitCode=0 Oct 01 16:50:44 crc kubenswrapper[4869]: I1001 16:50:44.169534 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ffg5l" event={"ID":"306a7da6-413c-488c-b4da-5e00b113ac4b","Type":"ContainerDied","Data":"10c5e2f61ddb9b5689d32bf0a14b46ed3d365b169aaf3e236db308108c878a38"} Oct 01 16:50:44 crc kubenswrapper[4869]: I1001 16:50:44.281202 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-sj82l"] Oct 01 16:50:44 crc kubenswrapper[4869]: I1001 16:50:44.283678 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-sj82l" Oct 01 16:50:44 crc kubenswrapper[4869]: I1001 16:50:44.291599 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-sj82l"] Oct 01 16:50:44 crc kubenswrapper[4869]: I1001 16:50:44.337882 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b75sw\" (UniqueName: \"kubernetes.io/projected/9b89155b-ec76-4078-afa0-6a66766726c1-kube-api-access-b75sw\") pod \"redhat-marketplace-sj82l\" (UID: \"9b89155b-ec76-4078-afa0-6a66766726c1\") " pod="openshift-marketplace/redhat-marketplace-sj82l" Oct 01 16:50:44 crc kubenswrapper[4869]: I1001 16:50:44.338133 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9b89155b-ec76-4078-afa0-6a66766726c1-utilities\") pod \"redhat-marketplace-sj82l\" (UID: \"9b89155b-ec76-4078-afa0-6a66766726c1\") " pod="openshift-marketplace/redhat-marketplace-sj82l" Oct 01 16:50:44 crc kubenswrapper[4869]: I1001 16:50:44.338562 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9b89155b-ec76-4078-afa0-6a66766726c1-catalog-content\") pod \"redhat-marketplace-sj82l\" (UID: \"9b89155b-ec76-4078-afa0-6a66766726c1\") " pod="openshift-marketplace/redhat-marketplace-sj82l" Oct 01 16:50:44 crc kubenswrapper[4869]: I1001 16:50:44.440900 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9b89155b-ec76-4078-afa0-6a66766726c1-catalog-content\") pod \"redhat-marketplace-sj82l\" (UID: \"9b89155b-ec76-4078-afa0-6a66766726c1\") " pod="openshift-marketplace/redhat-marketplace-sj82l" Oct 01 16:50:44 crc kubenswrapper[4869]: I1001 16:50:44.441004 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b75sw\" (UniqueName: \"kubernetes.io/projected/9b89155b-ec76-4078-afa0-6a66766726c1-kube-api-access-b75sw\") pod \"redhat-marketplace-sj82l\" (UID: \"9b89155b-ec76-4078-afa0-6a66766726c1\") " pod="openshift-marketplace/redhat-marketplace-sj82l" Oct 01 16:50:44 crc kubenswrapper[4869]: I1001 16:50:44.441076 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9b89155b-ec76-4078-afa0-6a66766726c1-utilities\") pod \"redhat-marketplace-sj82l\" (UID: \"9b89155b-ec76-4078-afa0-6a66766726c1\") " pod="openshift-marketplace/redhat-marketplace-sj82l" Oct 01 16:50:44 crc kubenswrapper[4869]: I1001 16:50:44.441412 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9b89155b-ec76-4078-afa0-6a66766726c1-catalog-content\") pod \"redhat-marketplace-sj82l\" (UID: \"9b89155b-ec76-4078-afa0-6a66766726c1\") " pod="openshift-marketplace/redhat-marketplace-sj82l" Oct 01 16:50:44 crc kubenswrapper[4869]: I1001 16:50:44.441584 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9b89155b-ec76-4078-afa0-6a66766726c1-utilities\") pod \"redhat-marketplace-sj82l\" (UID: \"9b89155b-ec76-4078-afa0-6a66766726c1\") " pod="openshift-marketplace/redhat-marketplace-sj82l" Oct 01 16:50:44 crc kubenswrapper[4869]: I1001 16:50:44.469044 4869 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-b75sw\" (UniqueName: \"kubernetes.io/projected/9b89155b-ec76-4078-afa0-6a66766726c1-kube-api-access-b75sw\") pod \"redhat-marketplace-sj82l\" (UID: \"9b89155b-ec76-4078-afa0-6a66766726c1\") " pod="openshift-marketplace/redhat-marketplace-sj82l" Oct 01 16:50:44 crc kubenswrapper[4869]: I1001 16:50:44.606490 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-sj82l" Oct 01 16:50:45 crc kubenswrapper[4869]: I1001 16:50:45.066132 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-sj82l"] Oct 01 16:50:45 crc kubenswrapper[4869]: W1001 16:50:45.067596 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9b89155b_ec76_4078_afa0_6a66766726c1.slice/crio-6a62a3b7ea70922d0d4f3b4cb9c87b2f1e50cefe972358d57155d0444bb683cc WatchSource:0}: Error finding container 6a62a3b7ea70922d0d4f3b4cb9c87b2f1e50cefe972358d57155d0444bb683cc: Status 404 returned error can't find the container with id 6a62a3b7ea70922d0d4f3b4cb9c87b2f1e50cefe972358d57155d0444bb683cc Oct 01 16:50:45 crc kubenswrapper[4869]: I1001 16:50:45.181001 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ffg5l" event={"ID":"306a7da6-413c-488c-b4da-5e00b113ac4b","Type":"ContainerStarted","Data":"fc8edd5fbce0ab1ce5604b215400a79751a73536d463cca7322720a237c7a6dc"} Oct 01 16:50:45 crc kubenswrapper[4869]: I1001 16:50:45.184000 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sj82l" event={"ID":"9b89155b-ec76-4078-afa0-6a66766726c1","Type":"ContainerStarted","Data":"6a62a3b7ea70922d0d4f3b4cb9c87b2f1e50cefe972358d57155d0444bb683cc"} Oct 01 16:50:45 crc kubenswrapper[4869]: I1001 16:50:45.581349 4869 scope.go:117] "RemoveContainer" containerID="d9281499446ba735acc8b48aef748c3e2b86ec9baa190cbca8c27b3fd0bb1351" Oct 01 16:50:45 crc kubenswrapper[4869]: E1001 16:50:45.581951 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:50:46 crc kubenswrapper[4869]: I1001 16:50:46.193962 4869 generic.go:334] "Generic (PLEG): container finished" podID="9b89155b-ec76-4078-afa0-6a66766726c1" containerID="b2b292cccc20995257ee93f0639beb64cf4a28b46d18caa824066bb1e45ddf5d" exitCode=0 Oct 01 16:50:46 crc kubenswrapper[4869]: I1001 16:50:46.194586 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sj82l" event={"ID":"9b89155b-ec76-4078-afa0-6a66766726c1","Type":"ContainerDied","Data":"b2b292cccc20995257ee93f0639beb64cf4a28b46d18caa824066bb1e45ddf5d"} Oct 01 16:50:46 crc kubenswrapper[4869]: I1001 16:50:46.219601 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-ffg5l" podStartSLOduration=3.7013282739999998 podStartE2EDuration="6.219582656s" podCreationTimestamp="2025-10-01 16:50:40 +0000 UTC" firstStartedPulling="2025-10-01 16:50:42.151572732 +0000 UTC m=+6351.298415848" lastFinishedPulling="2025-10-01 16:50:44.669827114 +0000 UTC 
m=+6353.816670230" observedRunningTime="2025-10-01 16:50:45.201694106 +0000 UTC m=+6354.348537222" watchObservedRunningTime="2025-10-01 16:50:46.219582656 +0000 UTC m=+6355.366425762" Oct 01 16:50:48 crc kubenswrapper[4869]: I1001 16:50:48.222363 4869 generic.go:334] "Generic (PLEG): container finished" podID="9b89155b-ec76-4078-afa0-6a66766726c1" containerID="4de1f5b56b3f9ce66b5e9b3aecadc352a3430bde18697c3035f75b4f29b3ef2d" exitCode=0 Oct 01 16:50:48 crc kubenswrapper[4869]: I1001 16:50:48.222484 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sj82l" event={"ID":"9b89155b-ec76-4078-afa0-6a66766726c1","Type":"ContainerDied","Data":"4de1f5b56b3f9ce66b5e9b3aecadc352a3430bde18697c3035f75b4f29b3ef2d"} Oct 01 16:50:49 crc kubenswrapper[4869]: I1001 16:50:49.233777 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sj82l" event={"ID":"9b89155b-ec76-4078-afa0-6a66766726c1","Type":"ContainerStarted","Data":"26cfd3a94f3269c0c565de9d4fea2d7264c8acd114541ce839265de242d965ef"} Oct 01 16:50:49 crc kubenswrapper[4869]: I1001 16:50:49.260718 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-sj82l" podStartSLOduration=2.8251096430000002 podStartE2EDuration="5.260692882s" podCreationTimestamp="2025-10-01 16:50:44 +0000 UTC" firstStartedPulling="2025-10-01 16:50:46.197334813 +0000 UTC m=+6355.344177919" lastFinishedPulling="2025-10-01 16:50:48.632918042 +0000 UTC m=+6357.779761158" observedRunningTime="2025-10-01 16:50:49.255603594 +0000 UTC m=+6358.402446710" watchObservedRunningTime="2025-10-01 16:50:49.260692882 +0000 UTC m=+6358.407536008" Oct 01 16:50:51 crc kubenswrapper[4869]: I1001 16:50:51.233116 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-ffg5l" Oct 01 16:50:51 crc kubenswrapper[4869]: I1001 16:50:51.233570 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-ffg5l" Oct 01 16:50:51 crc kubenswrapper[4869]: I1001 16:50:51.307732 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-ffg5l" Oct 01 16:50:51 crc kubenswrapper[4869]: I1001 16:50:51.402489 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-ffg5l" Oct 01 16:50:54 crc kubenswrapper[4869]: I1001 16:50:54.606657 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-sj82l" Oct 01 16:50:54 crc kubenswrapper[4869]: I1001 16:50:54.607213 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-sj82l" Oct 01 16:50:54 crc kubenswrapper[4869]: I1001 16:50:54.680630 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-sj82l" Oct 01 16:50:55 crc kubenswrapper[4869]: I1001 16:50:55.392102 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-sj82l" Oct 01 16:50:56 crc kubenswrapper[4869]: I1001 16:50:56.581993 4869 scope.go:117] "RemoveContainer" containerID="d9281499446ba735acc8b48aef748c3e2b86ec9baa190cbca8c27b3fd0bb1351" Oct 01 16:50:56 crc kubenswrapper[4869]: E1001 16:50:56.582583 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to 
\"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:50:57 crc kubenswrapper[4869]: I1001 16:50:57.678579 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-ffg5l"] Oct 01 16:50:57 crc kubenswrapper[4869]: I1001 16:50:57.679252 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-ffg5l" podUID="306a7da6-413c-488c-b4da-5e00b113ac4b" containerName="registry-server" containerID="cri-o://fc8edd5fbce0ab1ce5604b215400a79751a73536d463cca7322720a237c7a6dc" gracePeriod=2 Oct 01 16:50:58 crc kubenswrapper[4869]: I1001 16:50:58.073268 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-sj82l"] Oct 01 16:50:58 crc kubenswrapper[4869]: I1001 16:50:58.073783 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-sj82l" podUID="9b89155b-ec76-4078-afa0-6a66766726c1" containerName="registry-server" containerID="cri-o://26cfd3a94f3269c0c565de9d4fea2d7264c8acd114541ce839265de242d965ef" gracePeriod=2 Oct 01 16:50:58 crc kubenswrapper[4869]: I1001 16:50:58.262488 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-ffg5l" Oct 01 16:50:58 crc kubenswrapper[4869]: I1001 16:50:58.349743 4869 generic.go:334] "Generic (PLEG): container finished" podID="9b89155b-ec76-4078-afa0-6a66766726c1" containerID="26cfd3a94f3269c0c565de9d4fea2d7264c8acd114541ce839265de242d965ef" exitCode=0 Oct 01 16:50:58 crc kubenswrapper[4869]: I1001 16:50:58.349823 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sj82l" event={"ID":"9b89155b-ec76-4078-afa0-6a66766726c1","Type":"ContainerDied","Data":"26cfd3a94f3269c0c565de9d4fea2d7264c8acd114541ce839265de242d965ef"} Oct 01 16:50:58 crc kubenswrapper[4869]: I1001 16:50:58.354286 4869 generic.go:334] "Generic (PLEG): container finished" podID="306a7da6-413c-488c-b4da-5e00b113ac4b" containerID="fc8edd5fbce0ab1ce5604b215400a79751a73536d463cca7322720a237c7a6dc" exitCode=0 Oct 01 16:50:58 crc kubenswrapper[4869]: I1001 16:50:58.354328 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ffg5l" event={"ID":"306a7da6-413c-488c-b4da-5e00b113ac4b","Type":"ContainerDied","Data":"fc8edd5fbce0ab1ce5604b215400a79751a73536d463cca7322720a237c7a6dc"} Oct 01 16:50:58 crc kubenswrapper[4869]: I1001 16:50:58.354357 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ffg5l" event={"ID":"306a7da6-413c-488c-b4da-5e00b113ac4b","Type":"ContainerDied","Data":"d49291849f82e7e638064ffaae3b7a4de73db07875180d0d24141bb8acdb63a5"} Oct 01 16:50:58 crc kubenswrapper[4869]: I1001 16:50:58.354375 4869 scope.go:117] "RemoveContainer" containerID="fc8edd5fbce0ab1ce5604b215400a79751a73536d463cca7322720a237c7a6dc" Oct 01 16:50:58 crc kubenswrapper[4869]: I1001 16:50:58.354515 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-ffg5l" Oct 01 16:50:58 crc kubenswrapper[4869]: I1001 16:50:58.380232 4869 scope.go:117] "RemoveContainer" containerID="10c5e2f61ddb9b5689d32bf0a14b46ed3d365b169aaf3e236db308108c878a38" Oct 01 16:50:58 crc kubenswrapper[4869]: I1001 16:50:58.419490 4869 scope.go:117] "RemoveContainer" containerID="8d514099a92d9586b24cc1acec035b5e8eb2a7bc955253a883a9c3a9e6766934" Oct 01 16:50:58 crc kubenswrapper[4869]: I1001 16:50:58.447047 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/306a7da6-413c-488c-b4da-5e00b113ac4b-catalog-content\") pod \"306a7da6-413c-488c-b4da-5e00b113ac4b\" (UID: \"306a7da6-413c-488c-b4da-5e00b113ac4b\") " Oct 01 16:50:58 crc kubenswrapper[4869]: I1001 16:50:58.447096 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bz47g\" (UniqueName: \"kubernetes.io/projected/306a7da6-413c-488c-b4da-5e00b113ac4b-kube-api-access-bz47g\") pod \"306a7da6-413c-488c-b4da-5e00b113ac4b\" (UID: \"306a7da6-413c-488c-b4da-5e00b113ac4b\") " Oct 01 16:50:58 crc kubenswrapper[4869]: I1001 16:50:58.447108 4869 scope.go:117] "RemoveContainer" containerID="fc8edd5fbce0ab1ce5604b215400a79751a73536d463cca7322720a237c7a6dc" Oct 01 16:50:58 crc kubenswrapper[4869]: I1001 16:50:58.447123 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/306a7da6-413c-488c-b4da-5e00b113ac4b-utilities\") pod \"306a7da6-413c-488c-b4da-5e00b113ac4b\" (UID: \"306a7da6-413c-488c-b4da-5e00b113ac4b\") " Oct 01 16:50:58 crc kubenswrapper[4869]: E1001 16:50:58.447976 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fc8edd5fbce0ab1ce5604b215400a79751a73536d463cca7322720a237c7a6dc\": container with ID starting with fc8edd5fbce0ab1ce5604b215400a79751a73536d463cca7322720a237c7a6dc not found: ID does not exist" containerID="fc8edd5fbce0ab1ce5604b215400a79751a73536d463cca7322720a237c7a6dc" Oct 01 16:50:58 crc kubenswrapper[4869]: I1001 16:50:58.448004 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fc8edd5fbce0ab1ce5604b215400a79751a73536d463cca7322720a237c7a6dc"} err="failed to get container status \"fc8edd5fbce0ab1ce5604b215400a79751a73536d463cca7322720a237c7a6dc\": rpc error: code = NotFound desc = could not find container \"fc8edd5fbce0ab1ce5604b215400a79751a73536d463cca7322720a237c7a6dc\": container with ID starting with fc8edd5fbce0ab1ce5604b215400a79751a73536d463cca7322720a237c7a6dc not found: ID does not exist" Oct 01 16:50:58 crc kubenswrapper[4869]: I1001 16:50:58.448029 4869 scope.go:117] "RemoveContainer" containerID="10c5e2f61ddb9b5689d32bf0a14b46ed3d365b169aaf3e236db308108c878a38" Oct 01 16:50:58 crc kubenswrapper[4869]: I1001 16:50:58.448026 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/306a7da6-413c-488c-b4da-5e00b113ac4b-utilities" (OuterVolumeSpecName: "utilities") pod "306a7da6-413c-488c-b4da-5e00b113ac4b" (UID: "306a7da6-413c-488c-b4da-5e00b113ac4b"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 16:50:58 crc kubenswrapper[4869]: E1001 16:50:58.448187 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"10c5e2f61ddb9b5689d32bf0a14b46ed3d365b169aaf3e236db308108c878a38\": container with ID starting with 10c5e2f61ddb9b5689d32bf0a14b46ed3d365b169aaf3e236db308108c878a38 not found: ID does not exist" containerID="10c5e2f61ddb9b5689d32bf0a14b46ed3d365b169aaf3e236db308108c878a38" Oct 01 16:50:58 crc kubenswrapper[4869]: I1001 16:50:58.448210 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"10c5e2f61ddb9b5689d32bf0a14b46ed3d365b169aaf3e236db308108c878a38"} err="failed to get container status \"10c5e2f61ddb9b5689d32bf0a14b46ed3d365b169aaf3e236db308108c878a38\": rpc error: code = NotFound desc = could not find container \"10c5e2f61ddb9b5689d32bf0a14b46ed3d365b169aaf3e236db308108c878a38\": container with ID starting with 10c5e2f61ddb9b5689d32bf0a14b46ed3d365b169aaf3e236db308108c878a38 not found: ID does not exist" Oct 01 16:50:58 crc kubenswrapper[4869]: I1001 16:50:58.448222 4869 scope.go:117] "RemoveContainer" containerID="8d514099a92d9586b24cc1acec035b5e8eb2a7bc955253a883a9c3a9e6766934" Oct 01 16:50:58 crc kubenswrapper[4869]: E1001 16:50:58.448386 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8d514099a92d9586b24cc1acec035b5e8eb2a7bc955253a883a9c3a9e6766934\": container with ID starting with 8d514099a92d9586b24cc1acec035b5e8eb2a7bc955253a883a9c3a9e6766934 not found: ID does not exist" containerID="8d514099a92d9586b24cc1acec035b5e8eb2a7bc955253a883a9c3a9e6766934" Oct 01 16:50:58 crc kubenswrapper[4869]: I1001 16:50:58.448408 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8d514099a92d9586b24cc1acec035b5e8eb2a7bc955253a883a9c3a9e6766934"} err="failed to get container status \"8d514099a92d9586b24cc1acec035b5e8eb2a7bc955253a883a9c3a9e6766934\": rpc error: code = NotFound desc = could not find container \"8d514099a92d9586b24cc1acec035b5e8eb2a7bc955253a883a9c3a9e6766934\": container with ID starting with 8d514099a92d9586b24cc1acec035b5e8eb2a7bc955253a883a9c3a9e6766934 not found: ID does not exist" Oct 01 16:50:58 crc kubenswrapper[4869]: I1001 16:50:58.453428 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/306a7da6-413c-488c-b4da-5e00b113ac4b-kube-api-access-bz47g" (OuterVolumeSpecName: "kube-api-access-bz47g") pod "306a7da6-413c-488c-b4da-5e00b113ac4b" (UID: "306a7da6-413c-488c-b4da-5e00b113ac4b"). InnerVolumeSpecName "kube-api-access-bz47g". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 16:50:58 crc kubenswrapper[4869]: I1001 16:50:58.488465 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/306a7da6-413c-488c-b4da-5e00b113ac4b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "306a7da6-413c-488c-b4da-5e00b113ac4b" (UID: "306a7da6-413c-488c-b4da-5e00b113ac4b"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 16:50:58 crc kubenswrapper[4869]: I1001 16:50:58.519346 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-sj82l" Oct 01 16:50:58 crc kubenswrapper[4869]: I1001 16:50:58.549859 4869 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/306a7da6-413c-488c-b4da-5e00b113ac4b-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 16:50:58 crc kubenswrapper[4869]: I1001 16:50:58.549892 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bz47g\" (UniqueName: \"kubernetes.io/projected/306a7da6-413c-488c-b4da-5e00b113ac4b-kube-api-access-bz47g\") on node \"crc\" DevicePath \"\"" Oct 01 16:50:58 crc kubenswrapper[4869]: I1001 16:50:58.549902 4869 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/306a7da6-413c-488c-b4da-5e00b113ac4b-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 16:50:58 crc kubenswrapper[4869]: I1001 16:50:58.650959 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9b89155b-ec76-4078-afa0-6a66766726c1-catalog-content\") pod \"9b89155b-ec76-4078-afa0-6a66766726c1\" (UID: \"9b89155b-ec76-4078-afa0-6a66766726c1\") " Oct 01 16:50:58 crc kubenswrapper[4869]: I1001 16:50:58.651157 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b75sw\" (UniqueName: \"kubernetes.io/projected/9b89155b-ec76-4078-afa0-6a66766726c1-kube-api-access-b75sw\") pod \"9b89155b-ec76-4078-afa0-6a66766726c1\" (UID: \"9b89155b-ec76-4078-afa0-6a66766726c1\") " Oct 01 16:50:58 crc kubenswrapper[4869]: I1001 16:50:58.651353 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9b89155b-ec76-4078-afa0-6a66766726c1-utilities\") pod \"9b89155b-ec76-4078-afa0-6a66766726c1\" (UID: \"9b89155b-ec76-4078-afa0-6a66766726c1\") " Oct 01 16:50:58 crc kubenswrapper[4869]: I1001 16:50:58.652276 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9b89155b-ec76-4078-afa0-6a66766726c1-utilities" (OuterVolumeSpecName: "utilities") pod "9b89155b-ec76-4078-afa0-6a66766726c1" (UID: "9b89155b-ec76-4078-afa0-6a66766726c1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 16:50:58 crc kubenswrapper[4869]: I1001 16:50:58.655572 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9b89155b-ec76-4078-afa0-6a66766726c1-kube-api-access-b75sw" (OuterVolumeSpecName: "kube-api-access-b75sw") pod "9b89155b-ec76-4078-afa0-6a66766726c1" (UID: "9b89155b-ec76-4078-afa0-6a66766726c1"). InnerVolumeSpecName "kube-api-access-b75sw". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 16:50:58 crc kubenswrapper[4869]: I1001 16:50:58.665023 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9b89155b-ec76-4078-afa0-6a66766726c1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9b89155b-ec76-4078-afa0-6a66766726c1" (UID: "9b89155b-ec76-4078-afa0-6a66766726c1"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 16:50:58 crc kubenswrapper[4869]: I1001 16:50:58.702176 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-ffg5l"] Oct 01 16:50:58 crc kubenswrapper[4869]: I1001 16:50:58.712077 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-ffg5l"] Oct 01 16:50:58 crc kubenswrapper[4869]: I1001 16:50:58.754421 4869 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9b89155b-ec76-4078-afa0-6a66766726c1-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 16:50:58 crc kubenswrapper[4869]: I1001 16:50:58.754458 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b75sw\" (UniqueName: \"kubernetes.io/projected/9b89155b-ec76-4078-afa0-6a66766726c1-kube-api-access-b75sw\") on node \"crc\" DevicePath \"\"" Oct 01 16:50:58 crc kubenswrapper[4869]: I1001 16:50:58.754476 4869 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9b89155b-ec76-4078-afa0-6a66766726c1-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 16:50:59 crc kubenswrapper[4869]: I1001 16:50:59.368838 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sj82l" event={"ID":"9b89155b-ec76-4078-afa0-6a66766726c1","Type":"ContainerDied","Data":"6a62a3b7ea70922d0d4f3b4cb9c87b2f1e50cefe972358d57155d0444bb683cc"} Oct 01 16:50:59 crc kubenswrapper[4869]: I1001 16:50:59.369194 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-sj82l" Oct 01 16:50:59 crc kubenswrapper[4869]: I1001 16:50:59.369201 4869 scope.go:117] "RemoveContainer" containerID="26cfd3a94f3269c0c565de9d4fea2d7264c8acd114541ce839265de242d965ef" Oct 01 16:50:59 crc kubenswrapper[4869]: I1001 16:50:59.418238 4869 scope.go:117] "RemoveContainer" containerID="4de1f5b56b3f9ce66b5e9b3aecadc352a3430bde18697c3035f75b4f29b3ef2d" Oct 01 16:50:59 crc kubenswrapper[4869]: I1001 16:50:59.428142 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-sj82l"] Oct 01 16:50:59 crc kubenswrapper[4869]: I1001 16:50:59.444800 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-sj82l"] Oct 01 16:50:59 crc kubenswrapper[4869]: I1001 16:50:59.449345 4869 scope.go:117] "RemoveContainer" containerID="b2b292cccc20995257ee93f0639beb64cf4a28b46d18caa824066bb1e45ddf5d" Oct 01 16:50:59 crc kubenswrapper[4869]: I1001 16:50:59.598460 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="306a7da6-413c-488c-b4da-5e00b113ac4b" path="/var/lib/kubelet/pods/306a7da6-413c-488c-b4da-5e00b113ac4b/volumes" Oct 01 16:50:59 crc kubenswrapper[4869]: I1001 16:50:59.599961 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9b89155b-ec76-4078-afa0-6a66766726c1" path="/var/lib/kubelet/pods/9b89155b-ec76-4078-afa0-6a66766726c1/volumes" Oct 01 16:51:09 crc kubenswrapper[4869]: I1001 16:51:09.587763 4869 scope.go:117] "RemoveContainer" containerID="d9281499446ba735acc8b48aef748c3e2b86ec9baa190cbca8c27b3fd0bb1351" Oct 01 16:51:09 crc kubenswrapper[4869]: E1001 16:51:09.588377 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:51:23 crc kubenswrapper[4869]: I1001 16:51:23.582857 4869 scope.go:117] "RemoveContainer" containerID="d9281499446ba735acc8b48aef748c3e2b86ec9baa190cbca8c27b3fd0bb1351" Oct 01 16:51:23 crc kubenswrapper[4869]: E1001 16:51:23.584731 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:51:37 crc kubenswrapper[4869]: I1001 16:51:37.581084 4869 scope.go:117] "RemoveContainer" containerID="d9281499446ba735acc8b48aef748c3e2b86ec9baa190cbca8c27b3fd0bb1351" Oct 01 16:51:37 crc kubenswrapper[4869]: E1001 16:51:37.582391 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:51:52 crc kubenswrapper[4869]: I1001 16:51:52.581951 4869 scope.go:117] "RemoveContainer" containerID="d9281499446ba735acc8b48aef748c3e2b86ec9baa190cbca8c27b3fd0bb1351" Oct 01 16:51:52 crc kubenswrapper[4869]: E1001 16:51:52.583898 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:52:05 crc kubenswrapper[4869]: I1001 16:52:05.581136 4869 scope.go:117] "RemoveContainer" containerID="d9281499446ba735acc8b48aef748c3e2b86ec9baa190cbca8c27b3fd0bb1351" Oct 01 16:52:05 crc kubenswrapper[4869]: E1001 16:52:05.582406 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:52:18 crc kubenswrapper[4869]: I1001 16:52:18.529015 4869 scope.go:117] "RemoveContainer" containerID="35902f3f330cd6553ac2526827df6f43059c1a52f18e92d2fd499969aa9157e4" Oct 01 16:52:18 crc kubenswrapper[4869]: I1001 16:52:18.553792 4869 scope.go:117] "RemoveContainer" containerID="a0476f20110c31a9b116de4907bc89402bb45e7eb74c2aae7e48a311fe6851ef" Oct 01 16:52:18 crc kubenswrapper[4869]: I1001 16:52:18.598288 4869 scope.go:117] "RemoveContainer" containerID="501fbc81fbeab211237ed5a753ba769cab49c9db71c34f4ad6774f90d8dd24b8" Oct 01 16:52:19 crc kubenswrapper[4869]: I1001 16:52:19.581777 4869 
scope.go:117] "RemoveContainer" containerID="d9281499446ba735acc8b48aef748c3e2b86ec9baa190cbca8c27b3fd0bb1351" Oct 01 16:52:19 crc kubenswrapper[4869]: E1001 16:52:19.582671 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:52:31 crc kubenswrapper[4869]: I1001 16:52:31.587065 4869 scope.go:117] "RemoveContainer" containerID="d9281499446ba735acc8b48aef748c3e2b86ec9baa190cbca8c27b3fd0bb1351" Oct 01 16:52:31 crc kubenswrapper[4869]: E1001 16:52:31.591512 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:52:44 crc kubenswrapper[4869]: I1001 16:52:44.581798 4869 scope.go:117] "RemoveContainer" containerID="d9281499446ba735acc8b48aef748c3e2b86ec9baa190cbca8c27b3fd0bb1351" Oct 01 16:52:44 crc kubenswrapper[4869]: E1001 16:52:44.582764 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:52:55 crc kubenswrapper[4869]: I1001 16:52:55.581672 4869 scope.go:117] "RemoveContainer" containerID="d9281499446ba735acc8b48aef748c3e2b86ec9baa190cbca8c27b3fd0bb1351" Oct 01 16:52:55 crc kubenswrapper[4869]: E1001 16:52:55.582661 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:53:08 crc kubenswrapper[4869]: I1001 16:53:08.580390 4869 scope.go:117] "RemoveContainer" containerID="d9281499446ba735acc8b48aef748c3e2b86ec9baa190cbca8c27b3fd0bb1351" Oct 01 16:53:08 crc kubenswrapper[4869]: E1001 16:53:08.581131 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:53:22 crc kubenswrapper[4869]: I1001 16:53:22.582117 4869 scope.go:117] "RemoveContainer" containerID="d9281499446ba735acc8b48aef748c3e2b86ec9baa190cbca8c27b3fd0bb1351" Oct 01 16:53:22 crc kubenswrapper[4869]: E1001 16:53:22.583435 4869 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:53:37 crc kubenswrapper[4869]: I1001 16:53:37.582845 4869 scope.go:117] "RemoveContainer" containerID="d9281499446ba735acc8b48aef748c3e2b86ec9baa190cbca8c27b3fd0bb1351" Oct 01 16:53:37 crc kubenswrapper[4869]: E1001 16:53:37.584211 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:53:51 crc kubenswrapper[4869]: I1001 16:53:51.589080 4869 scope.go:117] "RemoveContainer" containerID="d9281499446ba735acc8b48aef748c3e2b86ec9baa190cbca8c27b3fd0bb1351" Oct 01 16:53:51 crc kubenswrapper[4869]: E1001 16:53:51.589920 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:54:04 crc kubenswrapper[4869]: I1001 16:54:04.582078 4869 scope.go:117] "RemoveContainer" containerID="d9281499446ba735acc8b48aef748c3e2b86ec9baa190cbca8c27b3fd0bb1351" Oct 01 16:54:04 crc kubenswrapper[4869]: E1001 16:54:04.583447 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:54:16 crc kubenswrapper[4869]: I1001 16:54:16.581459 4869 scope.go:117] "RemoveContainer" containerID="d9281499446ba735acc8b48aef748c3e2b86ec9baa190cbca8c27b3fd0bb1351" Oct 01 16:54:16 crc kubenswrapper[4869]: E1001 16:54:16.582385 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:54:27 crc kubenswrapper[4869]: I1001 16:54:27.582035 4869 scope.go:117] "RemoveContainer" containerID="d9281499446ba735acc8b48aef748c3e2b86ec9baa190cbca8c27b3fd0bb1351" Oct 01 16:54:27 crc kubenswrapper[4869]: E1001 16:54:27.583654 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:54:42 crc kubenswrapper[4869]: I1001 16:54:42.581042 4869 scope.go:117] "RemoveContainer" containerID="d9281499446ba735acc8b48aef748c3e2b86ec9baa190cbca8c27b3fd0bb1351" Oct 01 16:54:42 crc kubenswrapper[4869]: E1001 16:54:42.581904 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 16:54:57 crc kubenswrapper[4869]: I1001 16:54:57.581710 4869 scope.go:117] "RemoveContainer" containerID="d9281499446ba735acc8b48aef748c3e2b86ec9baa190cbca8c27b3fd0bb1351" Oct 01 16:54:57 crc kubenswrapper[4869]: I1001 16:54:57.975828 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" event={"ID":"a4b64f7f-0b03-4f47-965b-9fde048b735c","Type":"ContainerStarted","Data":"f2358938d07dae570aa0749158f73ada6c9c893bad82912fd6adaf5ef37d3137"} Oct 01 16:57:13 crc kubenswrapper[4869]: I1001 16:57:13.354004 4869 patch_prober.go:28] interesting pod/machine-config-daemon-c86m8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 16:57:13 crc kubenswrapper[4869]: I1001 16:57:13.354554 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 16:57:18 crc kubenswrapper[4869]: I1001 16:57:18.630610 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-bp6nw"] Oct 01 16:57:18 crc kubenswrapper[4869]: E1001 16:57:18.631521 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="306a7da6-413c-488c-b4da-5e00b113ac4b" containerName="registry-server" Oct 01 16:57:18 crc kubenswrapper[4869]: I1001 16:57:18.631536 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="306a7da6-413c-488c-b4da-5e00b113ac4b" containerName="registry-server" Oct 01 16:57:18 crc kubenswrapper[4869]: E1001 16:57:18.631558 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="306a7da6-413c-488c-b4da-5e00b113ac4b" containerName="extract-content" Oct 01 16:57:18 crc kubenswrapper[4869]: I1001 16:57:18.631565 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="306a7da6-413c-488c-b4da-5e00b113ac4b" containerName="extract-content" Oct 01 16:57:18 crc kubenswrapper[4869]: E1001 16:57:18.631577 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9b89155b-ec76-4078-afa0-6a66766726c1" containerName="registry-server" Oct 01 16:57:18 crc kubenswrapper[4869]: I1001 16:57:18.631585 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="9b89155b-ec76-4078-afa0-6a66766726c1" containerName="registry-server" Oct 01 
16:57:18 crc kubenswrapper[4869]: E1001 16:57:18.631593 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9b89155b-ec76-4078-afa0-6a66766726c1" containerName="extract-utilities" Oct 01 16:57:18 crc kubenswrapper[4869]: I1001 16:57:18.631599 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="9b89155b-ec76-4078-afa0-6a66766726c1" containerName="extract-utilities" Oct 01 16:57:18 crc kubenswrapper[4869]: E1001 16:57:18.631613 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="306a7da6-413c-488c-b4da-5e00b113ac4b" containerName="extract-utilities" Oct 01 16:57:18 crc kubenswrapper[4869]: I1001 16:57:18.631620 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="306a7da6-413c-488c-b4da-5e00b113ac4b" containerName="extract-utilities" Oct 01 16:57:18 crc kubenswrapper[4869]: E1001 16:57:18.631636 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9b89155b-ec76-4078-afa0-6a66766726c1" containerName="extract-content" Oct 01 16:57:18 crc kubenswrapper[4869]: I1001 16:57:18.631642 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="9b89155b-ec76-4078-afa0-6a66766726c1" containerName="extract-content" Oct 01 16:57:18 crc kubenswrapper[4869]: I1001 16:57:18.631819 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="9b89155b-ec76-4078-afa0-6a66766726c1" containerName="registry-server" Oct 01 16:57:18 crc kubenswrapper[4869]: I1001 16:57:18.631840 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="306a7da6-413c-488c-b4da-5e00b113ac4b" containerName="registry-server" Oct 01 16:57:18 crc kubenswrapper[4869]: I1001 16:57:18.634493 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-bp6nw" Oct 01 16:57:18 crc kubenswrapper[4869]: I1001 16:57:18.658664 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-bp6nw"] Oct 01 16:57:18 crc kubenswrapper[4869]: I1001 16:57:18.777721 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fa97cf9b-d960-49b0-8332-17410c98c394-utilities\") pod \"redhat-operators-bp6nw\" (UID: \"fa97cf9b-d960-49b0-8332-17410c98c394\") " pod="openshift-marketplace/redhat-operators-bp6nw" Oct 01 16:57:18 crc kubenswrapper[4869]: I1001 16:57:18.777925 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fa97cf9b-d960-49b0-8332-17410c98c394-catalog-content\") pod \"redhat-operators-bp6nw\" (UID: \"fa97cf9b-d960-49b0-8332-17410c98c394\") " pod="openshift-marketplace/redhat-operators-bp6nw" Oct 01 16:57:18 crc kubenswrapper[4869]: I1001 16:57:18.778534 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ldjbz\" (UniqueName: \"kubernetes.io/projected/fa97cf9b-d960-49b0-8332-17410c98c394-kube-api-access-ldjbz\") pod \"redhat-operators-bp6nw\" (UID: \"fa97cf9b-d960-49b0-8332-17410c98c394\") " pod="openshift-marketplace/redhat-operators-bp6nw" Oct 01 16:57:18 crc kubenswrapper[4869]: I1001 16:57:18.880034 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fa97cf9b-d960-49b0-8332-17410c98c394-catalog-content\") pod \"redhat-operators-bp6nw\" (UID: \"fa97cf9b-d960-49b0-8332-17410c98c394\") " 
pod="openshift-marketplace/redhat-operators-bp6nw" Oct 01 16:57:18 crc kubenswrapper[4869]: I1001 16:57:18.880247 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ldjbz\" (UniqueName: \"kubernetes.io/projected/fa97cf9b-d960-49b0-8332-17410c98c394-kube-api-access-ldjbz\") pod \"redhat-operators-bp6nw\" (UID: \"fa97cf9b-d960-49b0-8332-17410c98c394\") " pod="openshift-marketplace/redhat-operators-bp6nw" Oct 01 16:57:18 crc kubenswrapper[4869]: I1001 16:57:18.880601 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fa97cf9b-d960-49b0-8332-17410c98c394-catalog-content\") pod \"redhat-operators-bp6nw\" (UID: \"fa97cf9b-d960-49b0-8332-17410c98c394\") " pod="openshift-marketplace/redhat-operators-bp6nw" Oct 01 16:57:18 crc kubenswrapper[4869]: I1001 16:57:18.880646 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fa97cf9b-d960-49b0-8332-17410c98c394-utilities\") pod \"redhat-operators-bp6nw\" (UID: \"fa97cf9b-d960-49b0-8332-17410c98c394\") " pod="openshift-marketplace/redhat-operators-bp6nw" Oct 01 16:57:18 crc kubenswrapper[4869]: I1001 16:57:18.880854 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fa97cf9b-d960-49b0-8332-17410c98c394-utilities\") pod \"redhat-operators-bp6nw\" (UID: \"fa97cf9b-d960-49b0-8332-17410c98c394\") " pod="openshift-marketplace/redhat-operators-bp6nw" Oct 01 16:57:18 crc kubenswrapper[4869]: I1001 16:57:18.899945 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ldjbz\" (UniqueName: \"kubernetes.io/projected/fa97cf9b-d960-49b0-8332-17410c98c394-kube-api-access-ldjbz\") pod \"redhat-operators-bp6nw\" (UID: \"fa97cf9b-d960-49b0-8332-17410c98c394\") " pod="openshift-marketplace/redhat-operators-bp6nw" Oct 01 16:57:18 crc kubenswrapper[4869]: I1001 16:57:18.953964 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-bp6nw" Oct 01 16:57:19 crc kubenswrapper[4869]: I1001 16:57:19.434642 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-bp6nw"] Oct 01 16:57:19 crc kubenswrapper[4869]: I1001 16:57:19.525669 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bp6nw" event={"ID":"fa97cf9b-d960-49b0-8332-17410c98c394","Type":"ContainerStarted","Data":"455efdea04992685da207589084552228b45ad37888f60b027ff35a1ee34b241"} Oct 01 16:57:20 crc kubenswrapper[4869]: I1001 16:57:20.537983 4869 generic.go:334] "Generic (PLEG): container finished" podID="fa97cf9b-d960-49b0-8332-17410c98c394" containerID="aa3d22c97f687165679ab04d47acb773a221806be532147f26090a4c1715f6a5" exitCode=0 Oct 01 16:57:20 crc kubenswrapper[4869]: I1001 16:57:20.538247 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bp6nw" event={"ID":"fa97cf9b-d960-49b0-8332-17410c98c394","Type":"ContainerDied","Data":"aa3d22c97f687165679ab04d47acb773a221806be532147f26090a4c1715f6a5"} Oct 01 16:57:20 crc kubenswrapper[4869]: I1001 16:57:20.541493 4869 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 01 16:57:22 crc kubenswrapper[4869]: I1001 16:57:22.556398 4869 generic.go:334] "Generic (PLEG): container finished" podID="fa97cf9b-d960-49b0-8332-17410c98c394" containerID="46daa541a01a4f894a4efba9cda5fce4550205d46cb169d458f12329eafed91b" exitCode=0 Oct 01 16:57:22 crc kubenswrapper[4869]: I1001 16:57:22.556450 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bp6nw" event={"ID":"fa97cf9b-d960-49b0-8332-17410c98c394","Type":"ContainerDied","Data":"46daa541a01a4f894a4efba9cda5fce4550205d46cb169d458f12329eafed91b"} Oct 01 16:57:23 crc kubenswrapper[4869]: I1001 16:57:23.570671 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bp6nw" event={"ID":"fa97cf9b-d960-49b0-8332-17410c98c394","Type":"ContainerStarted","Data":"2600ce2a67047444f71e13ba4cb8222c42a8d2b71c3bc363d94bf3748fb80a76"} Oct 01 16:57:23 crc kubenswrapper[4869]: I1001 16:57:23.601663 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-bp6nw" podStartSLOduration=3.181742204 podStartE2EDuration="5.601641906s" podCreationTimestamp="2025-10-01 16:57:18 +0000 UTC" firstStartedPulling="2025-10-01 16:57:20.541223832 +0000 UTC m=+6749.688066948" lastFinishedPulling="2025-10-01 16:57:22.961123534 +0000 UTC m=+6752.107966650" observedRunningTime="2025-10-01 16:57:23.600644991 +0000 UTC m=+6752.747488117" watchObservedRunningTime="2025-10-01 16:57:23.601641906 +0000 UTC m=+6752.748485022" Oct 01 16:57:28 crc kubenswrapper[4869]: I1001 16:57:28.956066 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-bp6nw" Oct 01 16:57:28 crc kubenswrapper[4869]: I1001 16:57:28.956773 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-bp6nw" Oct 01 16:57:29 crc kubenswrapper[4869]: I1001 16:57:29.028903 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-bp6nw" Oct 01 16:57:29 crc kubenswrapper[4869]: I1001 16:57:29.684379 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-marketplace/redhat-operators-bp6nw" Oct 01 16:57:29 crc kubenswrapper[4869]: I1001 16:57:29.739881 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-bp6nw"] Oct 01 16:57:31 crc kubenswrapper[4869]: I1001 16:57:31.662293 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-bp6nw" podUID="fa97cf9b-d960-49b0-8332-17410c98c394" containerName="registry-server" containerID="cri-o://2600ce2a67047444f71e13ba4cb8222c42a8d2b71c3bc363d94bf3748fb80a76" gracePeriod=2 Oct 01 16:57:32 crc kubenswrapper[4869]: I1001 16:57:32.239896 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-bp6nw" Oct 01 16:57:32 crc kubenswrapper[4869]: I1001 16:57:32.284326 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fa97cf9b-d960-49b0-8332-17410c98c394-utilities\") pod \"fa97cf9b-d960-49b0-8332-17410c98c394\" (UID: \"fa97cf9b-d960-49b0-8332-17410c98c394\") " Oct 01 16:57:32 crc kubenswrapper[4869]: I1001 16:57:32.284507 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fa97cf9b-d960-49b0-8332-17410c98c394-catalog-content\") pod \"fa97cf9b-d960-49b0-8332-17410c98c394\" (UID: \"fa97cf9b-d960-49b0-8332-17410c98c394\") " Oct 01 16:57:32 crc kubenswrapper[4869]: I1001 16:57:32.284600 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ldjbz\" (UniqueName: \"kubernetes.io/projected/fa97cf9b-d960-49b0-8332-17410c98c394-kube-api-access-ldjbz\") pod \"fa97cf9b-d960-49b0-8332-17410c98c394\" (UID: \"fa97cf9b-d960-49b0-8332-17410c98c394\") " Oct 01 16:57:32 crc kubenswrapper[4869]: I1001 16:57:32.287006 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fa97cf9b-d960-49b0-8332-17410c98c394-utilities" (OuterVolumeSpecName: "utilities") pod "fa97cf9b-d960-49b0-8332-17410c98c394" (UID: "fa97cf9b-d960-49b0-8332-17410c98c394"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 16:57:32 crc kubenswrapper[4869]: I1001 16:57:32.294535 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fa97cf9b-d960-49b0-8332-17410c98c394-kube-api-access-ldjbz" (OuterVolumeSpecName: "kube-api-access-ldjbz") pod "fa97cf9b-d960-49b0-8332-17410c98c394" (UID: "fa97cf9b-d960-49b0-8332-17410c98c394"). InnerVolumeSpecName "kube-api-access-ldjbz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 16:57:32 crc kubenswrapper[4869]: I1001 16:57:32.387255 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ldjbz\" (UniqueName: \"kubernetes.io/projected/fa97cf9b-d960-49b0-8332-17410c98c394-kube-api-access-ldjbz\") on node \"crc\" DevicePath \"\"" Oct 01 16:57:32 crc kubenswrapper[4869]: I1001 16:57:32.387570 4869 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fa97cf9b-d960-49b0-8332-17410c98c394-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 16:57:32 crc kubenswrapper[4869]: I1001 16:57:32.396144 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fa97cf9b-d960-49b0-8332-17410c98c394-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "fa97cf9b-d960-49b0-8332-17410c98c394" (UID: "fa97cf9b-d960-49b0-8332-17410c98c394"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 16:57:32 crc kubenswrapper[4869]: I1001 16:57:32.488573 4869 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fa97cf9b-d960-49b0-8332-17410c98c394-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 16:57:32 crc kubenswrapper[4869]: I1001 16:57:32.681770 4869 generic.go:334] "Generic (PLEG): container finished" podID="fa97cf9b-d960-49b0-8332-17410c98c394" containerID="2600ce2a67047444f71e13ba4cb8222c42a8d2b71c3bc363d94bf3748fb80a76" exitCode=0 Oct 01 16:57:32 crc kubenswrapper[4869]: I1001 16:57:32.681838 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bp6nw" event={"ID":"fa97cf9b-d960-49b0-8332-17410c98c394","Type":"ContainerDied","Data":"2600ce2a67047444f71e13ba4cb8222c42a8d2b71c3bc363d94bf3748fb80a76"} Oct 01 16:57:32 crc kubenswrapper[4869]: I1001 16:57:32.681888 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bp6nw" event={"ID":"fa97cf9b-d960-49b0-8332-17410c98c394","Type":"ContainerDied","Data":"455efdea04992685da207589084552228b45ad37888f60b027ff35a1ee34b241"} Oct 01 16:57:32 crc kubenswrapper[4869]: I1001 16:57:32.681898 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-bp6nw" Oct 01 16:57:32 crc kubenswrapper[4869]: I1001 16:57:32.681930 4869 scope.go:117] "RemoveContainer" containerID="2600ce2a67047444f71e13ba4cb8222c42a8d2b71c3bc363d94bf3748fb80a76" Oct 01 16:57:32 crc kubenswrapper[4869]: I1001 16:57:32.718117 4869 scope.go:117] "RemoveContainer" containerID="46daa541a01a4f894a4efba9cda5fce4550205d46cb169d458f12329eafed91b" Oct 01 16:57:32 crc kubenswrapper[4869]: I1001 16:57:32.775631 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-bp6nw"] Oct 01 16:57:32 crc kubenswrapper[4869]: I1001 16:57:32.779243 4869 scope.go:117] "RemoveContainer" containerID="aa3d22c97f687165679ab04d47acb773a221806be532147f26090a4c1715f6a5" Oct 01 16:57:32 crc kubenswrapper[4869]: I1001 16:57:32.792593 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-bp6nw"] Oct 01 16:57:32 crc kubenswrapper[4869]: I1001 16:57:32.812305 4869 scope.go:117] "RemoveContainer" containerID="2600ce2a67047444f71e13ba4cb8222c42a8d2b71c3bc363d94bf3748fb80a76" Oct 01 16:57:32 crc kubenswrapper[4869]: E1001 16:57:32.812850 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2600ce2a67047444f71e13ba4cb8222c42a8d2b71c3bc363d94bf3748fb80a76\": container with ID starting with 2600ce2a67047444f71e13ba4cb8222c42a8d2b71c3bc363d94bf3748fb80a76 not found: ID does not exist" containerID="2600ce2a67047444f71e13ba4cb8222c42a8d2b71c3bc363d94bf3748fb80a76" Oct 01 16:57:32 crc kubenswrapper[4869]: I1001 16:57:32.812891 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2600ce2a67047444f71e13ba4cb8222c42a8d2b71c3bc363d94bf3748fb80a76"} err="failed to get container status \"2600ce2a67047444f71e13ba4cb8222c42a8d2b71c3bc363d94bf3748fb80a76\": rpc error: code = NotFound desc = could not find container \"2600ce2a67047444f71e13ba4cb8222c42a8d2b71c3bc363d94bf3748fb80a76\": container with ID starting with 2600ce2a67047444f71e13ba4cb8222c42a8d2b71c3bc363d94bf3748fb80a76 not found: ID does not exist" Oct 01 16:57:32 crc kubenswrapper[4869]: I1001 16:57:32.812939 4869 scope.go:117] "RemoveContainer" containerID="46daa541a01a4f894a4efba9cda5fce4550205d46cb169d458f12329eafed91b" Oct 01 16:57:32 crc kubenswrapper[4869]: E1001 16:57:32.813246 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"46daa541a01a4f894a4efba9cda5fce4550205d46cb169d458f12329eafed91b\": container with ID starting with 46daa541a01a4f894a4efba9cda5fce4550205d46cb169d458f12329eafed91b not found: ID does not exist" containerID="46daa541a01a4f894a4efba9cda5fce4550205d46cb169d458f12329eafed91b" Oct 01 16:57:32 crc kubenswrapper[4869]: I1001 16:57:32.813378 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"46daa541a01a4f894a4efba9cda5fce4550205d46cb169d458f12329eafed91b"} err="failed to get container status \"46daa541a01a4f894a4efba9cda5fce4550205d46cb169d458f12329eafed91b\": rpc error: code = NotFound desc = could not find container \"46daa541a01a4f894a4efba9cda5fce4550205d46cb169d458f12329eafed91b\": container with ID starting with 46daa541a01a4f894a4efba9cda5fce4550205d46cb169d458f12329eafed91b not found: ID does not exist" Oct 01 16:57:32 crc kubenswrapper[4869]: I1001 16:57:32.813397 4869 scope.go:117] "RemoveContainer" 
containerID="aa3d22c97f687165679ab04d47acb773a221806be532147f26090a4c1715f6a5" Oct 01 16:57:32 crc kubenswrapper[4869]: E1001 16:57:32.813863 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aa3d22c97f687165679ab04d47acb773a221806be532147f26090a4c1715f6a5\": container with ID starting with aa3d22c97f687165679ab04d47acb773a221806be532147f26090a4c1715f6a5 not found: ID does not exist" containerID="aa3d22c97f687165679ab04d47acb773a221806be532147f26090a4c1715f6a5" Oct 01 16:57:32 crc kubenswrapper[4869]: I1001 16:57:32.813912 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aa3d22c97f687165679ab04d47acb773a221806be532147f26090a4c1715f6a5"} err="failed to get container status \"aa3d22c97f687165679ab04d47acb773a221806be532147f26090a4c1715f6a5\": rpc error: code = NotFound desc = could not find container \"aa3d22c97f687165679ab04d47acb773a221806be532147f26090a4c1715f6a5\": container with ID starting with aa3d22c97f687165679ab04d47acb773a221806be532147f26090a4c1715f6a5 not found: ID does not exist" Oct 01 16:57:33 crc kubenswrapper[4869]: I1001 16:57:33.593998 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fa97cf9b-d960-49b0-8332-17410c98c394" path="/var/lib/kubelet/pods/fa97cf9b-d960-49b0-8332-17410c98c394/volumes" Oct 01 16:57:43 crc kubenswrapper[4869]: I1001 16:57:43.354027 4869 patch_prober.go:28] interesting pod/machine-config-daemon-c86m8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 16:57:43 crc kubenswrapper[4869]: I1001 16:57:43.354611 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 16:58:13 crc kubenswrapper[4869]: I1001 16:58:13.354337 4869 patch_prober.go:28] interesting pod/machine-config-daemon-c86m8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 16:58:13 crc kubenswrapper[4869]: I1001 16:58:13.355075 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 16:58:13 crc kubenswrapper[4869]: I1001 16:58:13.355140 4869 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" Oct 01 16:58:13 crc kubenswrapper[4869]: I1001 16:58:13.356170 4869 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"f2358938d07dae570aa0749158f73ada6c9c893bad82912fd6adaf5ef37d3137"} pod="openshift-machine-config-operator/machine-config-daemon-c86m8" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 01 16:58:13 crc 
kubenswrapper[4869]: I1001 16:58:13.356303 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" containerID="cri-o://f2358938d07dae570aa0749158f73ada6c9c893bad82912fd6adaf5ef37d3137" gracePeriod=600 Oct 01 16:58:14 crc kubenswrapper[4869]: I1001 16:58:14.137459 4869 generic.go:334] "Generic (PLEG): container finished" podID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerID="f2358938d07dae570aa0749158f73ada6c9c893bad82912fd6adaf5ef37d3137" exitCode=0 Oct 01 16:58:14 crc kubenswrapper[4869]: I1001 16:58:14.137544 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" event={"ID":"a4b64f7f-0b03-4f47-965b-9fde048b735c","Type":"ContainerDied","Data":"f2358938d07dae570aa0749158f73ada6c9c893bad82912fd6adaf5ef37d3137"} Oct 01 16:58:14 crc kubenswrapper[4869]: I1001 16:58:14.138253 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" event={"ID":"a4b64f7f-0b03-4f47-965b-9fde048b735c","Type":"ContainerStarted","Data":"13e59c672543e057ea5825aec1ce436345edb91e96c5ed564629c2560312008b"} Oct 01 16:58:14 crc kubenswrapper[4869]: I1001 16:58:14.138287 4869 scope.go:117] "RemoveContainer" containerID="d9281499446ba735acc8b48aef748c3e2b86ec9baa190cbca8c27b3fd0bb1351" Oct 01 16:58:40 crc kubenswrapper[4869]: I1001 16:58:40.604011 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-l2n9x"] Oct 01 16:58:40 crc kubenswrapper[4869]: E1001 16:58:40.605145 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa97cf9b-d960-49b0-8332-17410c98c394" containerName="registry-server" Oct 01 16:58:40 crc kubenswrapper[4869]: I1001 16:58:40.605161 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa97cf9b-d960-49b0-8332-17410c98c394" containerName="registry-server" Oct 01 16:58:40 crc kubenswrapper[4869]: E1001 16:58:40.605197 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa97cf9b-d960-49b0-8332-17410c98c394" containerName="extract-utilities" Oct 01 16:58:40 crc kubenswrapper[4869]: I1001 16:58:40.605210 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa97cf9b-d960-49b0-8332-17410c98c394" containerName="extract-utilities" Oct 01 16:58:40 crc kubenswrapper[4869]: E1001 16:58:40.605232 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa97cf9b-d960-49b0-8332-17410c98c394" containerName="extract-content" Oct 01 16:58:40 crc kubenswrapper[4869]: I1001 16:58:40.605240 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa97cf9b-d960-49b0-8332-17410c98c394" containerName="extract-content" Oct 01 16:58:40 crc kubenswrapper[4869]: I1001 16:58:40.605528 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa97cf9b-d960-49b0-8332-17410c98c394" containerName="registry-server" Oct 01 16:58:40 crc kubenswrapper[4869]: I1001 16:58:40.607385 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-l2n9x" Oct 01 16:58:40 crc kubenswrapper[4869]: I1001 16:58:40.629897 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-l2n9x"] Oct 01 16:58:40 crc kubenswrapper[4869]: I1001 16:58:40.779977 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3573a2fb-d139-41d2-bd5a-83e01bf68c5a-utilities\") pod \"community-operators-l2n9x\" (UID: \"3573a2fb-d139-41d2-bd5a-83e01bf68c5a\") " pod="openshift-marketplace/community-operators-l2n9x" Oct 01 16:58:40 crc kubenswrapper[4869]: I1001 16:58:40.780030 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3573a2fb-d139-41d2-bd5a-83e01bf68c5a-catalog-content\") pod \"community-operators-l2n9x\" (UID: \"3573a2fb-d139-41d2-bd5a-83e01bf68c5a\") " pod="openshift-marketplace/community-operators-l2n9x" Oct 01 16:58:40 crc kubenswrapper[4869]: I1001 16:58:40.780057 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s2mz2\" (UniqueName: \"kubernetes.io/projected/3573a2fb-d139-41d2-bd5a-83e01bf68c5a-kube-api-access-s2mz2\") pod \"community-operators-l2n9x\" (UID: \"3573a2fb-d139-41d2-bd5a-83e01bf68c5a\") " pod="openshift-marketplace/community-operators-l2n9x" Oct 01 16:58:40 crc kubenswrapper[4869]: I1001 16:58:40.881924 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3573a2fb-d139-41d2-bd5a-83e01bf68c5a-utilities\") pod \"community-operators-l2n9x\" (UID: \"3573a2fb-d139-41d2-bd5a-83e01bf68c5a\") " pod="openshift-marketplace/community-operators-l2n9x" Oct 01 16:58:40 crc kubenswrapper[4869]: I1001 16:58:40.882207 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3573a2fb-d139-41d2-bd5a-83e01bf68c5a-catalog-content\") pod \"community-operators-l2n9x\" (UID: \"3573a2fb-d139-41d2-bd5a-83e01bf68c5a\") " pod="openshift-marketplace/community-operators-l2n9x" Oct 01 16:58:40 crc kubenswrapper[4869]: I1001 16:58:40.882234 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2mz2\" (UniqueName: \"kubernetes.io/projected/3573a2fb-d139-41d2-bd5a-83e01bf68c5a-kube-api-access-s2mz2\") pod \"community-operators-l2n9x\" (UID: \"3573a2fb-d139-41d2-bd5a-83e01bf68c5a\") " pod="openshift-marketplace/community-operators-l2n9x" Oct 01 16:58:40 crc kubenswrapper[4869]: I1001 16:58:40.882539 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3573a2fb-d139-41d2-bd5a-83e01bf68c5a-catalog-content\") pod \"community-operators-l2n9x\" (UID: \"3573a2fb-d139-41d2-bd5a-83e01bf68c5a\") " pod="openshift-marketplace/community-operators-l2n9x" Oct 01 16:58:40 crc kubenswrapper[4869]: I1001 16:58:40.882770 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3573a2fb-d139-41d2-bd5a-83e01bf68c5a-utilities\") pod \"community-operators-l2n9x\" (UID: \"3573a2fb-d139-41d2-bd5a-83e01bf68c5a\") " pod="openshift-marketplace/community-operators-l2n9x" Oct 01 16:58:40 crc kubenswrapper[4869]: I1001 16:58:40.906923 4869 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-s2mz2\" (UniqueName: \"kubernetes.io/projected/3573a2fb-d139-41d2-bd5a-83e01bf68c5a-kube-api-access-s2mz2\") pod \"community-operators-l2n9x\" (UID: \"3573a2fb-d139-41d2-bd5a-83e01bf68c5a\") " pod="openshift-marketplace/community-operators-l2n9x" Oct 01 16:58:40 crc kubenswrapper[4869]: I1001 16:58:40.935955 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-l2n9x" Oct 01 16:58:41 crc kubenswrapper[4869]: I1001 16:58:41.445665 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-l2n9x"] Oct 01 16:58:42 crc kubenswrapper[4869]: I1001 16:58:42.422458 4869 generic.go:334] "Generic (PLEG): container finished" podID="3573a2fb-d139-41d2-bd5a-83e01bf68c5a" containerID="124edf377cd273af80d282c4e7f2a21bcb3aaf30f9d0c56f6ab91e67efd5c2cd" exitCode=0 Oct 01 16:58:42 crc kubenswrapper[4869]: I1001 16:58:42.422554 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-l2n9x" event={"ID":"3573a2fb-d139-41d2-bd5a-83e01bf68c5a","Type":"ContainerDied","Data":"124edf377cd273af80d282c4e7f2a21bcb3aaf30f9d0c56f6ab91e67efd5c2cd"} Oct 01 16:58:42 crc kubenswrapper[4869]: I1001 16:58:42.423238 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-l2n9x" event={"ID":"3573a2fb-d139-41d2-bd5a-83e01bf68c5a","Type":"ContainerStarted","Data":"1ff89ea8ea5d40a851e468e1485d09532828e9550e08c2901d0182475bf3459f"} Oct 01 16:58:44 crc kubenswrapper[4869]: I1001 16:58:44.453242 4869 generic.go:334] "Generic (PLEG): container finished" podID="3573a2fb-d139-41d2-bd5a-83e01bf68c5a" containerID="95c6ca022457c8944c37d2680765ba1d2db32c75f7c923004bc6ebd54df26ac1" exitCode=0 Oct 01 16:58:44 crc kubenswrapper[4869]: I1001 16:58:44.453462 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-l2n9x" event={"ID":"3573a2fb-d139-41d2-bd5a-83e01bf68c5a","Type":"ContainerDied","Data":"95c6ca022457c8944c37d2680765ba1d2db32c75f7c923004bc6ebd54df26ac1"} Oct 01 16:58:45 crc kubenswrapper[4869]: I1001 16:58:45.462578 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-l2n9x" event={"ID":"3573a2fb-d139-41d2-bd5a-83e01bf68c5a","Type":"ContainerStarted","Data":"28cbcf7826fe218edeb2a3d34a95babe8db818c59d8517a8eb9845c7ce124dc7"} Oct 01 16:58:45 crc kubenswrapper[4869]: I1001 16:58:45.496647 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-l2n9x" podStartSLOduration=3.058287927 podStartE2EDuration="5.496623646s" podCreationTimestamp="2025-10-01 16:58:40 +0000 UTC" firstStartedPulling="2025-10-01 16:58:42.425545571 +0000 UTC m=+6831.572388687" lastFinishedPulling="2025-10-01 16:58:44.86388128 +0000 UTC m=+6834.010724406" observedRunningTime="2025-10-01 16:58:45.485399272 +0000 UTC m=+6834.632242398" watchObservedRunningTime="2025-10-01 16:58:45.496623646 +0000 UTC m=+6834.643466782" Oct 01 16:58:50 crc kubenswrapper[4869]: I1001 16:58:50.936700 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-l2n9x" Oct 01 16:58:50 crc kubenswrapper[4869]: I1001 16:58:50.937349 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-l2n9x" Oct 01 16:58:51 crc kubenswrapper[4869]: I1001 16:58:51.009643 4869 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-l2n9x" Oct 01 16:58:51 crc kubenswrapper[4869]: I1001 16:58:51.631137 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-l2n9x" Oct 01 16:58:51 crc kubenswrapper[4869]: I1001 16:58:51.686730 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-l2n9x"] Oct 01 16:58:53 crc kubenswrapper[4869]: I1001 16:58:53.562722 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-l2n9x" podUID="3573a2fb-d139-41d2-bd5a-83e01bf68c5a" containerName="registry-server" containerID="cri-o://28cbcf7826fe218edeb2a3d34a95babe8db818c59d8517a8eb9845c7ce124dc7" gracePeriod=2 Oct 01 16:58:54 crc kubenswrapper[4869]: I1001 16:58:54.110634 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-l2n9x" Oct 01 16:58:54 crc kubenswrapper[4869]: I1001 16:58:54.292545 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3573a2fb-d139-41d2-bd5a-83e01bf68c5a-catalog-content\") pod \"3573a2fb-d139-41d2-bd5a-83e01bf68c5a\" (UID: \"3573a2fb-d139-41d2-bd5a-83e01bf68c5a\") " Oct 01 16:58:54 crc kubenswrapper[4869]: I1001 16:58:54.292745 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3573a2fb-d139-41d2-bd5a-83e01bf68c5a-utilities\") pod \"3573a2fb-d139-41d2-bd5a-83e01bf68c5a\" (UID: \"3573a2fb-d139-41d2-bd5a-83e01bf68c5a\") " Oct 01 16:58:54 crc kubenswrapper[4869]: I1001 16:58:54.292799 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s2mz2\" (UniqueName: \"kubernetes.io/projected/3573a2fb-d139-41d2-bd5a-83e01bf68c5a-kube-api-access-s2mz2\") pod \"3573a2fb-d139-41d2-bd5a-83e01bf68c5a\" (UID: \"3573a2fb-d139-41d2-bd5a-83e01bf68c5a\") " Oct 01 16:58:54 crc kubenswrapper[4869]: I1001 16:58:54.294216 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3573a2fb-d139-41d2-bd5a-83e01bf68c5a-utilities" (OuterVolumeSpecName: "utilities") pod "3573a2fb-d139-41d2-bd5a-83e01bf68c5a" (UID: "3573a2fb-d139-41d2-bd5a-83e01bf68c5a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 16:58:54 crc kubenswrapper[4869]: I1001 16:58:54.301064 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3573a2fb-d139-41d2-bd5a-83e01bf68c5a-kube-api-access-s2mz2" (OuterVolumeSpecName: "kube-api-access-s2mz2") pod "3573a2fb-d139-41d2-bd5a-83e01bf68c5a" (UID: "3573a2fb-d139-41d2-bd5a-83e01bf68c5a"). InnerVolumeSpecName "kube-api-access-s2mz2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 16:58:54 crc kubenswrapper[4869]: I1001 16:58:54.395980 4869 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3573a2fb-d139-41d2-bd5a-83e01bf68c5a-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 16:58:54 crc kubenswrapper[4869]: I1001 16:58:54.396037 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s2mz2\" (UniqueName: \"kubernetes.io/projected/3573a2fb-d139-41d2-bd5a-83e01bf68c5a-kube-api-access-s2mz2\") on node \"crc\" DevicePath \"\"" Oct 01 16:58:54 crc kubenswrapper[4869]: I1001 16:58:54.555184 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3573a2fb-d139-41d2-bd5a-83e01bf68c5a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3573a2fb-d139-41d2-bd5a-83e01bf68c5a" (UID: "3573a2fb-d139-41d2-bd5a-83e01bf68c5a"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 16:58:54 crc kubenswrapper[4869]: I1001 16:58:54.575653 4869 generic.go:334] "Generic (PLEG): container finished" podID="3573a2fb-d139-41d2-bd5a-83e01bf68c5a" containerID="28cbcf7826fe218edeb2a3d34a95babe8db818c59d8517a8eb9845c7ce124dc7" exitCode=0 Oct 01 16:58:54 crc kubenswrapper[4869]: I1001 16:58:54.575700 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-l2n9x" event={"ID":"3573a2fb-d139-41d2-bd5a-83e01bf68c5a","Type":"ContainerDied","Data":"28cbcf7826fe218edeb2a3d34a95babe8db818c59d8517a8eb9845c7ce124dc7"} Oct 01 16:58:54 crc kubenswrapper[4869]: I1001 16:58:54.575719 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-l2n9x" Oct 01 16:58:54 crc kubenswrapper[4869]: I1001 16:58:54.575736 4869 scope.go:117] "RemoveContainer" containerID="28cbcf7826fe218edeb2a3d34a95babe8db818c59d8517a8eb9845c7ce124dc7" Oct 01 16:58:54 crc kubenswrapper[4869]: I1001 16:58:54.575726 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-l2n9x" event={"ID":"3573a2fb-d139-41d2-bd5a-83e01bf68c5a","Type":"ContainerDied","Data":"1ff89ea8ea5d40a851e468e1485d09532828e9550e08c2901d0182475bf3459f"} Oct 01 16:58:54 crc kubenswrapper[4869]: I1001 16:58:54.601382 4869 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3573a2fb-d139-41d2-bd5a-83e01bf68c5a-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 16:58:54 crc kubenswrapper[4869]: I1001 16:58:54.614385 4869 scope.go:117] "RemoveContainer" containerID="95c6ca022457c8944c37d2680765ba1d2db32c75f7c923004bc6ebd54df26ac1" Oct 01 16:58:54 crc kubenswrapper[4869]: I1001 16:58:54.623426 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-l2n9x"] Oct 01 16:58:54 crc kubenswrapper[4869]: I1001 16:58:54.634453 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-l2n9x"] Oct 01 16:58:54 crc kubenswrapper[4869]: I1001 16:58:54.639741 4869 scope.go:117] "RemoveContainer" containerID="124edf377cd273af80d282c4e7f2a21bcb3aaf30f9d0c56f6ab91e67efd5c2cd" Oct 01 16:58:54 crc kubenswrapper[4869]: I1001 16:58:54.673798 4869 scope.go:117] "RemoveContainer" containerID="28cbcf7826fe218edeb2a3d34a95babe8db818c59d8517a8eb9845c7ce124dc7" Oct 01 16:58:54 crc kubenswrapper[4869]: E1001 16:58:54.674296 4869 log.go:32] 
"ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"28cbcf7826fe218edeb2a3d34a95babe8db818c59d8517a8eb9845c7ce124dc7\": container with ID starting with 28cbcf7826fe218edeb2a3d34a95babe8db818c59d8517a8eb9845c7ce124dc7 not found: ID does not exist" containerID="28cbcf7826fe218edeb2a3d34a95babe8db818c59d8517a8eb9845c7ce124dc7" Oct 01 16:58:54 crc kubenswrapper[4869]: I1001 16:58:54.674352 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"28cbcf7826fe218edeb2a3d34a95babe8db818c59d8517a8eb9845c7ce124dc7"} err="failed to get container status \"28cbcf7826fe218edeb2a3d34a95babe8db818c59d8517a8eb9845c7ce124dc7\": rpc error: code = NotFound desc = could not find container \"28cbcf7826fe218edeb2a3d34a95babe8db818c59d8517a8eb9845c7ce124dc7\": container with ID starting with 28cbcf7826fe218edeb2a3d34a95babe8db818c59d8517a8eb9845c7ce124dc7 not found: ID does not exist" Oct 01 16:58:54 crc kubenswrapper[4869]: I1001 16:58:54.674384 4869 scope.go:117] "RemoveContainer" containerID="95c6ca022457c8944c37d2680765ba1d2db32c75f7c923004bc6ebd54df26ac1" Oct 01 16:58:54 crc kubenswrapper[4869]: E1001 16:58:54.674795 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"95c6ca022457c8944c37d2680765ba1d2db32c75f7c923004bc6ebd54df26ac1\": container with ID starting with 95c6ca022457c8944c37d2680765ba1d2db32c75f7c923004bc6ebd54df26ac1 not found: ID does not exist" containerID="95c6ca022457c8944c37d2680765ba1d2db32c75f7c923004bc6ebd54df26ac1" Oct 01 16:58:54 crc kubenswrapper[4869]: I1001 16:58:54.674815 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"95c6ca022457c8944c37d2680765ba1d2db32c75f7c923004bc6ebd54df26ac1"} err="failed to get container status \"95c6ca022457c8944c37d2680765ba1d2db32c75f7c923004bc6ebd54df26ac1\": rpc error: code = NotFound desc = could not find container \"95c6ca022457c8944c37d2680765ba1d2db32c75f7c923004bc6ebd54df26ac1\": container with ID starting with 95c6ca022457c8944c37d2680765ba1d2db32c75f7c923004bc6ebd54df26ac1 not found: ID does not exist" Oct 01 16:58:54 crc kubenswrapper[4869]: I1001 16:58:54.674828 4869 scope.go:117] "RemoveContainer" containerID="124edf377cd273af80d282c4e7f2a21bcb3aaf30f9d0c56f6ab91e67efd5c2cd" Oct 01 16:58:54 crc kubenswrapper[4869]: E1001 16:58:54.675100 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"124edf377cd273af80d282c4e7f2a21bcb3aaf30f9d0c56f6ab91e67efd5c2cd\": container with ID starting with 124edf377cd273af80d282c4e7f2a21bcb3aaf30f9d0c56f6ab91e67efd5c2cd not found: ID does not exist" containerID="124edf377cd273af80d282c4e7f2a21bcb3aaf30f9d0c56f6ab91e67efd5c2cd" Oct 01 16:58:54 crc kubenswrapper[4869]: I1001 16:58:54.675122 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"124edf377cd273af80d282c4e7f2a21bcb3aaf30f9d0c56f6ab91e67efd5c2cd"} err="failed to get container status \"124edf377cd273af80d282c4e7f2a21bcb3aaf30f9d0c56f6ab91e67efd5c2cd\": rpc error: code = NotFound desc = could not find container \"124edf377cd273af80d282c4e7f2a21bcb3aaf30f9d0c56f6ab91e67efd5c2cd\": container with ID starting with 124edf377cd273af80d282c4e7f2a21bcb3aaf30f9d0c56f6ab91e67efd5c2cd not found: ID does not exist" Oct 01 16:58:55 crc kubenswrapper[4869]: I1001 16:58:55.603634 4869 kubelet_volumes.go:163] "Cleaned 
up orphaned pod volumes dir" podUID="3573a2fb-d139-41d2-bd5a-83e01bf68c5a" path="/var/lib/kubelet/pods/3573a2fb-d139-41d2-bd5a-83e01bf68c5a/volumes" Oct 01 17:00:00 crc kubenswrapper[4869]: I1001 17:00:00.216953 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29322300-2pbjk"] Oct 01 17:00:00 crc kubenswrapper[4869]: E1001 17:00:00.218094 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3573a2fb-d139-41d2-bd5a-83e01bf68c5a" containerName="extract-content" Oct 01 17:00:00 crc kubenswrapper[4869]: I1001 17:00:00.218111 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="3573a2fb-d139-41d2-bd5a-83e01bf68c5a" containerName="extract-content" Oct 01 17:00:00 crc kubenswrapper[4869]: E1001 17:00:00.218148 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3573a2fb-d139-41d2-bd5a-83e01bf68c5a" containerName="registry-server" Oct 01 17:00:00 crc kubenswrapper[4869]: I1001 17:00:00.218156 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="3573a2fb-d139-41d2-bd5a-83e01bf68c5a" containerName="registry-server" Oct 01 17:00:00 crc kubenswrapper[4869]: E1001 17:00:00.218174 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3573a2fb-d139-41d2-bd5a-83e01bf68c5a" containerName="extract-utilities" Oct 01 17:00:00 crc kubenswrapper[4869]: I1001 17:00:00.218184 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="3573a2fb-d139-41d2-bd5a-83e01bf68c5a" containerName="extract-utilities" Oct 01 17:00:00 crc kubenswrapper[4869]: I1001 17:00:00.218412 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="3573a2fb-d139-41d2-bd5a-83e01bf68c5a" containerName="registry-server" Oct 01 17:00:00 crc kubenswrapper[4869]: I1001 17:00:00.219187 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29322300-2pbjk" Oct 01 17:00:00 crc kubenswrapper[4869]: I1001 17:00:00.221738 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 01 17:00:00 crc kubenswrapper[4869]: I1001 17:00:00.221750 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 01 17:00:00 crc kubenswrapper[4869]: I1001 17:00:00.224799 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29322300-2pbjk"] Oct 01 17:00:00 crc kubenswrapper[4869]: I1001 17:00:00.358451 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9xb6m\" (UniqueName: \"kubernetes.io/projected/d71ab737-0511-4685-bd12-412d851b1594-kube-api-access-9xb6m\") pod \"collect-profiles-29322300-2pbjk\" (UID: \"d71ab737-0511-4685-bd12-412d851b1594\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322300-2pbjk" Oct 01 17:00:00 crc kubenswrapper[4869]: I1001 17:00:00.358514 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d71ab737-0511-4685-bd12-412d851b1594-config-volume\") pod \"collect-profiles-29322300-2pbjk\" (UID: \"d71ab737-0511-4685-bd12-412d851b1594\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322300-2pbjk" Oct 01 17:00:00 crc kubenswrapper[4869]: I1001 17:00:00.358683 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d71ab737-0511-4685-bd12-412d851b1594-secret-volume\") pod \"collect-profiles-29322300-2pbjk\" (UID: \"d71ab737-0511-4685-bd12-412d851b1594\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322300-2pbjk" Oct 01 17:00:00 crc kubenswrapper[4869]: I1001 17:00:00.460971 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9xb6m\" (UniqueName: \"kubernetes.io/projected/d71ab737-0511-4685-bd12-412d851b1594-kube-api-access-9xb6m\") pod \"collect-profiles-29322300-2pbjk\" (UID: \"d71ab737-0511-4685-bd12-412d851b1594\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322300-2pbjk" Oct 01 17:00:00 crc kubenswrapper[4869]: I1001 17:00:00.461036 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d71ab737-0511-4685-bd12-412d851b1594-config-volume\") pod \"collect-profiles-29322300-2pbjk\" (UID: \"d71ab737-0511-4685-bd12-412d851b1594\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322300-2pbjk" Oct 01 17:00:00 crc kubenswrapper[4869]: I1001 17:00:00.461083 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d71ab737-0511-4685-bd12-412d851b1594-secret-volume\") pod \"collect-profiles-29322300-2pbjk\" (UID: \"d71ab737-0511-4685-bd12-412d851b1594\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322300-2pbjk" Oct 01 17:00:00 crc kubenswrapper[4869]: I1001 17:00:00.462812 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d71ab737-0511-4685-bd12-412d851b1594-config-volume\") pod 
\"collect-profiles-29322300-2pbjk\" (UID: \"d71ab737-0511-4685-bd12-412d851b1594\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322300-2pbjk" Oct 01 17:00:00 crc kubenswrapper[4869]: I1001 17:00:00.469168 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d71ab737-0511-4685-bd12-412d851b1594-secret-volume\") pod \"collect-profiles-29322300-2pbjk\" (UID: \"d71ab737-0511-4685-bd12-412d851b1594\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322300-2pbjk" Oct 01 17:00:00 crc kubenswrapper[4869]: I1001 17:00:00.479187 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9xb6m\" (UniqueName: \"kubernetes.io/projected/d71ab737-0511-4685-bd12-412d851b1594-kube-api-access-9xb6m\") pod \"collect-profiles-29322300-2pbjk\" (UID: \"d71ab737-0511-4685-bd12-412d851b1594\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322300-2pbjk" Oct 01 17:00:00 crc kubenswrapper[4869]: I1001 17:00:00.553106 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29322300-2pbjk" Oct 01 17:00:01 crc kubenswrapper[4869]: I1001 17:00:01.085384 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29322300-2pbjk"] Oct 01 17:00:01 crc kubenswrapper[4869]: I1001 17:00:01.349618 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29322300-2pbjk" event={"ID":"d71ab737-0511-4685-bd12-412d851b1594","Type":"ContainerStarted","Data":"ba07d16bdd117a528fd25f5bd3e423d0172c0be7fe3588db3cc9a6b2025d73e0"} Oct 01 17:00:01 crc kubenswrapper[4869]: I1001 17:00:01.349998 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29322300-2pbjk" event={"ID":"d71ab737-0511-4685-bd12-412d851b1594","Type":"ContainerStarted","Data":"7292c5707d3dd7024756d01dd7df759c6ef15120e0839ae39780e9644e71df98"} Oct 01 17:00:01 crc kubenswrapper[4869]: I1001 17:00:01.377569 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29322300-2pbjk" podStartSLOduration=1.377543389 podStartE2EDuration="1.377543389s" podCreationTimestamp="2025-10-01 17:00:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 17:00:01.364835567 +0000 UTC m=+6910.511678683" watchObservedRunningTime="2025-10-01 17:00:01.377543389 +0000 UTC m=+6910.524386505" Oct 01 17:00:02 crc kubenswrapper[4869]: I1001 17:00:02.362578 4869 generic.go:334] "Generic (PLEG): container finished" podID="d71ab737-0511-4685-bd12-412d851b1594" containerID="ba07d16bdd117a528fd25f5bd3e423d0172c0be7fe3588db3cc9a6b2025d73e0" exitCode=0 Oct 01 17:00:02 crc kubenswrapper[4869]: I1001 17:00:02.362630 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29322300-2pbjk" event={"ID":"d71ab737-0511-4685-bd12-412d851b1594","Type":"ContainerDied","Data":"ba07d16bdd117a528fd25f5bd3e423d0172c0be7fe3588db3cc9a6b2025d73e0"} Oct 01 17:00:03 crc kubenswrapper[4869]: I1001 17:00:03.783727 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29322300-2pbjk" Oct 01 17:00:03 crc kubenswrapper[4869]: I1001 17:00:03.936958 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d71ab737-0511-4685-bd12-412d851b1594-config-volume\") pod \"d71ab737-0511-4685-bd12-412d851b1594\" (UID: \"d71ab737-0511-4685-bd12-412d851b1594\") " Oct 01 17:00:03 crc kubenswrapper[4869]: I1001 17:00:03.937173 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d71ab737-0511-4685-bd12-412d851b1594-secret-volume\") pod \"d71ab737-0511-4685-bd12-412d851b1594\" (UID: \"d71ab737-0511-4685-bd12-412d851b1594\") " Oct 01 17:00:03 crc kubenswrapper[4869]: I1001 17:00:03.937471 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xb6m\" (UniqueName: \"kubernetes.io/projected/d71ab737-0511-4685-bd12-412d851b1594-kube-api-access-9xb6m\") pod \"d71ab737-0511-4685-bd12-412d851b1594\" (UID: \"d71ab737-0511-4685-bd12-412d851b1594\") " Oct 01 17:00:03 crc kubenswrapper[4869]: I1001 17:00:03.937945 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d71ab737-0511-4685-bd12-412d851b1594-config-volume" (OuterVolumeSpecName: "config-volume") pod "d71ab737-0511-4685-bd12-412d851b1594" (UID: "d71ab737-0511-4685-bd12-412d851b1594"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 17:00:03 crc kubenswrapper[4869]: I1001 17:00:03.938177 4869 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d71ab737-0511-4685-bd12-412d851b1594-config-volume\") on node \"crc\" DevicePath \"\"" Oct 01 17:00:03 crc kubenswrapper[4869]: I1001 17:00:03.942616 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d71ab737-0511-4685-bd12-412d851b1594-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "d71ab737-0511-4685-bd12-412d851b1594" (UID: "d71ab737-0511-4685-bd12-412d851b1594"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 17:00:03 crc kubenswrapper[4869]: I1001 17:00:03.943063 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d71ab737-0511-4685-bd12-412d851b1594-kube-api-access-9xb6m" (OuterVolumeSpecName: "kube-api-access-9xb6m") pod "d71ab737-0511-4685-bd12-412d851b1594" (UID: "d71ab737-0511-4685-bd12-412d851b1594"). InnerVolumeSpecName "kube-api-access-9xb6m". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 17:00:04 crc kubenswrapper[4869]: I1001 17:00:04.042852 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xb6m\" (UniqueName: \"kubernetes.io/projected/d71ab737-0511-4685-bd12-412d851b1594-kube-api-access-9xb6m\") on node \"crc\" DevicePath \"\"" Oct 01 17:00:04 crc kubenswrapper[4869]: I1001 17:00:04.042891 4869 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d71ab737-0511-4685-bd12-412d851b1594-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 01 17:00:04 crc kubenswrapper[4869]: I1001 17:00:04.384355 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29322300-2pbjk" event={"ID":"d71ab737-0511-4685-bd12-412d851b1594","Type":"ContainerDied","Data":"7292c5707d3dd7024756d01dd7df759c6ef15120e0839ae39780e9644e71df98"} Oct 01 17:00:04 crc kubenswrapper[4869]: I1001 17:00:04.384983 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7292c5707d3dd7024756d01dd7df759c6ef15120e0839ae39780e9644e71df98" Oct 01 17:00:04 crc kubenswrapper[4869]: I1001 17:00:04.384421 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29322300-2pbjk" Oct 01 17:00:04 crc kubenswrapper[4869]: I1001 17:00:04.510490 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29322255-pnzkx"] Oct 01 17:00:04 crc kubenswrapper[4869]: I1001 17:00:04.520359 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29322255-pnzkx"] Oct 01 17:00:05 crc kubenswrapper[4869]: I1001 17:00:05.596401 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4c7e9681-1077-4754-b7bb-a8ebb308ed4e" path="/var/lib/kubelet/pods/4c7e9681-1077-4754-b7bb-a8ebb308ed4e/volumes" Oct 01 17:00:13 crc kubenswrapper[4869]: I1001 17:00:13.354358 4869 patch_prober.go:28] interesting pod/machine-config-daemon-c86m8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 17:00:13 crc kubenswrapper[4869]: I1001 17:00:13.355141 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 17:00:18 crc kubenswrapper[4869]: I1001 17:00:18.857323 4869 scope.go:117] "RemoveContainer" containerID="0f214d87a1cb3868b9ece6047f64e3f16ba06fb91650611da15d9f47f4bfd521" Oct 01 17:00:43 crc kubenswrapper[4869]: I1001 17:00:43.353973 4869 patch_prober.go:28] interesting pod/machine-config-daemon-c86m8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 17:00:43 crc kubenswrapper[4869]: I1001 17:00:43.354747 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" 
containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 17:01:00 crc kubenswrapper[4869]: I1001 17:01:00.170406 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cron-29322301-hpmjg"] Oct 01 17:01:00 crc kubenswrapper[4869]: E1001 17:01:00.171559 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d71ab737-0511-4685-bd12-412d851b1594" containerName="collect-profiles" Oct 01 17:01:00 crc kubenswrapper[4869]: I1001 17:01:00.171576 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="d71ab737-0511-4685-bd12-412d851b1594" containerName="collect-profiles" Oct 01 17:01:00 crc kubenswrapper[4869]: I1001 17:01:00.171778 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="d71ab737-0511-4685-bd12-412d851b1594" containerName="collect-profiles" Oct 01 17:01:00 crc kubenswrapper[4869]: I1001 17:01:00.172387 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29322301-hpmjg" Oct 01 17:01:00 crc kubenswrapper[4869]: I1001 17:01:00.189550 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29322301-hpmjg"] Oct 01 17:01:00 crc kubenswrapper[4869]: I1001 17:01:00.317879 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z5d5v\" (UniqueName: \"kubernetes.io/projected/701f9b68-20f9-4937-8541-8e2f104908ef-kube-api-access-z5d5v\") pod \"keystone-cron-29322301-hpmjg\" (UID: \"701f9b68-20f9-4937-8541-8e2f104908ef\") " pod="openstack/keystone-cron-29322301-hpmjg" Oct 01 17:01:00 crc kubenswrapper[4869]: I1001 17:01:00.318009 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/701f9b68-20f9-4937-8541-8e2f104908ef-combined-ca-bundle\") pod \"keystone-cron-29322301-hpmjg\" (UID: \"701f9b68-20f9-4937-8541-8e2f104908ef\") " pod="openstack/keystone-cron-29322301-hpmjg" Oct 01 17:01:00 crc kubenswrapper[4869]: I1001 17:01:00.318039 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/701f9b68-20f9-4937-8541-8e2f104908ef-config-data\") pod \"keystone-cron-29322301-hpmjg\" (UID: \"701f9b68-20f9-4937-8541-8e2f104908ef\") " pod="openstack/keystone-cron-29322301-hpmjg" Oct 01 17:01:00 crc kubenswrapper[4869]: I1001 17:01:00.318097 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/701f9b68-20f9-4937-8541-8e2f104908ef-fernet-keys\") pod \"keystone-cron-29322301-hpmjg\" (UID: \"701f9b68-20f9-4937-8541-8e2f104908ef\") " pod="openstack/keystone-cron-29322301-hpmjg" Oct 01 17:01:00 crc kubenswrapper[4869]: I1001 17:01:00.420146 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/701f9b68-20f9-4937-8541-8e2f104908ef-fernet-keys\") pod \"keystone-cron-29322301-hpmjg\" (UID: \"701f9b68-20f9-4937-8541-8e2f104908ef\") " pod="openstack/keystone-cron-29322301-hpmjg" Oct 01 17:01:00 crc kubenswrapper[4869]: I1001 17:01:00.420271 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z5d5v\" (UniqueName: 
\"kubernetes.io/projected/701f9b68-20f9-4937-8541-8e2f104908ef-kube-api-access-z5d5v\") pod \"keystone-cron-29322301-hpmjg\" (UID: \"701f9b68-20f9-4937-8541-8e2f104908ef\") " pod="openstack/keystone-cron-29322301-hpmjg" Oct 01 17:01:00 crc kubenswrapper[4869]: I1001 17:01:00.420362 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/701f9b68-20f9-4937-8541-8e2f104908ef-combined-ca-bundle\") pod \"keystone-cron-29322301-hpmjg\" (UID: \"701f9b68-20f9-4937-8541-8e2f104908ef\") " pod="openstack/keystone-cron-29322301-hpmjg" Oct 01 17:01:00 crc kubenswrapper[4869]: I1001 17:01:00.420390 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/701f9b68-20f9-4937-8541-8e2f104908ef-config-data\") pod \"keystone-cron-29322301-hpmjg\" (UID: \"701f9b68-20f9-4937-8541-8e2f104908ef\") " pod="openstack/keystone-cron-29322301-hpmjg" Oct 01 17:01:00 crc kubenswrapper[4869]: I1001 17:01:00.427424 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/701f9b68-20f9-4937-8541-8e2f104908ef-combined-ca-bundle\") pod \"keystone-cron-29322301-hpmjg\" (UID: \"701f9b68-20f9-4937-8541-8e2f104908ef\") " pod="openstack/keystone-cron-29322301-hpmjg" Oct 01 17:01:00 crc kubenswrapper[4869]: I1001 17:01:00.427597 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/701f9b68-20f9-4937-8541-8e2f104908ef-config-data\") pod \"keystone-cron-29322301-hpmjg\" (UID: \"701f9b68-20f9-4937-8541-8e2f104908ef\") " pod="openstack/keystone-cron-29322301-hpmjg" Oct 01 17:01:00 crc kubenswrapper[4869]: I1001 17:01:00.428387 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/701f9b68-20f9-4937-8541-8e2f104908ef-fernet-keys\") pod \"keystone-cron-29322301-hpmjg\" (UID: \"701f9b68-20f9-4937-8541-8e2f104908ef\") " pod="openstack/keystone-cron-29322301-hpmjg" Oct 01 17:01:00 crc kubenswrapper[4869]: I1001 17:01:00.437402 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z5d5v\" (UniqueName: \"kubernetes.io/projected/701f9b68-20f9-4937-8541-8e2f104908ef-kube-api-access-z5d5v\") pod \"keystone-cron-29322301-hpmjg\" (UID: \"701f9b68-20f9-4937-8541-8e2f104908ef\") " pod="openstack/keystone-cron-29322301-hpmjg" Oct 01 17:01:00 crc kubenswrapper[4869]: I1001 17:01:00.496917 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29322301-hpmjg" Oct 01 17:01:00 crc kubenswrapper[4869]: I1001 17:01:00.941409 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29322301-hpmjg"] Oct 01 17:01:01 crc kubenswrapper[4869]: I1001 17:01:01.021119 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29322301-hpmjg" event={"ID":"701f9b68-20f9-4937-8541-8e2f104908ef","Type":"ContainerStarted","Data":"17f43194ac530535da508b3e771096d92ff6d2e0c5df2458073b999c4f05fd48"} Oct 01 17:01:02 crc kubenswrapper[4869]: I1001 17:01:02.034915 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29322301-hpmjg" event={"ID":"701f9b68-20f9-4937-8541-8e2f104908ef","Type":"ContainerStarted","Data":"6ad165e46d35d6c456515bb9820fd499adbbfe3dc93747153e803aeee2318b5a"} Oct 01 17:01:02 crc kubenswrapper[4869]: I1001 17:01:02.062598 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-cron-29322301-hpmjg" podStartSLOduration=2.062580716 podStartE2EDuration="2.062580716s" podCreationTimestamp="2025-10-01 17:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 17:01:02.055202009 +0000 UTC m=+6971.202045135" watchObservedRunningTime="2025-10-01 17:01:02.062580716 +0000 UTC m=+6971.209423822" Oct 01 17:01:05 crc kubenswrapper[4869]: I1001 17:01:05.068563 4869 generic.go:334] "Generic (PLEG): container finished" podID="701f9b68-20f9-4937-8541-8e2f104908ef" containerID="6ad165e46d35d6c456515bb9820fd499adbbfe3dc93747153e803aeee2318b5a" exitCode=0 Oct 01 17:01:05 crc kubenswrapper[4869]: I1001 17:01:05.068862 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29322301-hpmjg" event={"ID":"701f9b68-20f9-4937-8541-8e2f104908ef","Type":"ContainerDied","Data":"6ad165e46d35d6c456515bb9820fd499adbbfe3dc93747153e803aeee2318b5a"} Oct 01 17:01:06 crc kubenswrapper[4869]: I1001 17:01:06.552604 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29322301-hpmjg" Oct 01 17:01:06 crc kubenswrapper[4869]: I1001 17:01:06.670528 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/701f9b68-20f9-4937-8541-8e2f104908ef-combined-ca-bundle\") pod \"701f9b68-20f9-4937-8541-8e2f104908ef\" (UID: \"701f9b68-20f9-4937-8541-8e2f104908ef\") " Oct 01 17:01:06 crc kubenswrapper[4869]: I1001 17:01:06.670787 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z5d5v\" (UniqueName: \"kubernetes.io/projected/701f9b68-20f9-4937-8541-8e2f104908ef-kube-api-access-z5d5v\") pod \"701f9b68-20f9-4937-8541-8e2f104908ef\" (UID: \"701f9b68-20f9-4937-8541-8e2f104908ef\") " Oct 01 17:01:06 crc kubenswrapper[4869]: I1001 17:01:06.670962 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/701f9b68-20f9-4937-8541-8e2f104908ef-fernet-keys\") pod \"701f9b68-20f9-4937-8541-8e2f104908ef\" (UID: \"701f9b68-20f9-4937-8541-8e2f104908ef\") " Oct 01 17:01:06 crc kubenswrapper[4869]: I1001 17:01:06.670999 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/701f9b68-20f9-4937-8541-8e2f104908ef-config-data\") pod \"701f9b68-20f9-4937-8541-8e2f104908ef\" (UID: \"701f9b68-20f9-4937-8541-8e2f104908ef\") " Oct 01 17:01:06 crc kubenswrapper[4869]: I1001 17:01:06.677006 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/701f9b68-20f9-4937-8541-8e2f104908ef-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "701f9b68-20f9-4937-8541-8e2f104908ef" (UID: "701f9b68-20f9-4937-8541-8e2f104908ef"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 17:01:06 crc kubenswrapper[4869]: I1001 17:01:06.677651 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/701f9b68-20f9-4937-8541-8e2f104908ef-kube-api-access-z5d5v" (OuterVolumeSpecName: "kube-api-access-z5d5v") pod "701f9b68-20f9-4937-8541-8e2f104908ef" (UID: "701f9b68-20f9-4937-8541-8e2f104908ef"). InnerVolumeSpecName "kube-api-access-z5d5v". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 17:01:06 crc kubenswrapper[4869]: I1001 17:01:06.703401 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/701f9b68-20f9-4937-8541-8e2f104908ef-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "701f9b68-20f9-4937-8541-8e2f104908ef" (UID: "701f9b68-20f9-4937-8541-8e2f104908ef"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 17:01:06 crc kubenswrapper[4869]: I1001 17:01:06.743169 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/701f9b68-20f9-4937-8541-8e2f104908ef-config-data" (OuterVolumeSpecName: "config-data") pod "701f9b68-20f9-4937-8541-8e2f104908ef" (UID: "701f9b68-20f9-4937-8541-8e2f104908ef"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 17:01:06 crc kubenswrapper[4869]: I1001 17:01:06.773648 4869 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/701f9b68-20f9-4937-8541-8e2f104908ef-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 17:01:06 crc kubenswrapper[4869]: I1001 17:01:06.773673 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z5d5v\" (UniqueName: \"kubernetes.io/projected/701f9b68-20f9-4937-8541-8e2f104908ef-kube-api-access-z5d5v\") on node \"crc\" DevicePath \"\"" Oct 01 17:01:06 crc kubenswrapper[4869]: I1001 17:01:06.773686 4869 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/701f9b68-20f9-4937-8541-8e2f104908ef-fernet-keys\") on node \"crc\" DevicePath \"\"" Oct 01 17:01:06 crc kubenswrapper[4869]: I1001 17:01:06.773695 4869 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/701f9b68-20f9-4937-8541-8e2f104908ef-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 17:01:07 crc kubenswrapper[4869]: I1001 17:01:07.094583 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29322301-hpmjg" event={"ID":"701f9b68-20f9-4937-8541-8e2f104908ef","Type":"ContainerDied","Data":"17f43194ac530535da508b3e771096d92ff6d2e0c5df2458073b999c4f05fd48"} Oct 01 17:01:07 crc kubenswrapper[4869]: I1001 17:01:07.094628 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="17f43194ac530535da508b3e771096d92ff6d2e0c5df2458073b999c4f05fd48" Oct 01 17:01:07 crc kubenswrapper[4869]: I1001 17:01:07.094689 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29322301-hpmjg" Oct 01 17:01:13 crc kubenswrapper[4869]: I1001 17:01:13.354645 4869 patch_prober.go:28] interesting pod/machine-config-daemon-c86m8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 17:01:13 crc kubenswrapper[4869]: I1001 17:01:13.355594 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 17:01:13 crc kubenswrapper[4869]: I1001 17:01:13.355658 4869 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" Oct 01 17:01:13 crc kubenswrapper[4869]: I1001 17:01:13.356836 4869 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"13e59c672543e057ea5825aec1ce436345edb91e96c5ed564629c2560312008b"} pod="openshift-machine-config-operator/machine-config-daemon-c86m8" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 01 17:01:13 crc kubenswrapper[4869]: I1001 17:01:13.356914 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" 
containerID="cri-o://13e59c672543e057ea5825aec1ce436345edb91e96c5ed564629c2560312008b" gracePeriod=600 Oct 01 17:01:13 crc kubenswrapper[4869]: E1001 17:01:13.484476 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 17:01:14 crc kubenswrapper[4869]: I1001 17:01:14.173560 4869 generic.go:334] "Generic (PLEG): container finished" podID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerID="13e59c672543e057ea5825aec1ce436345edb91e96c5ed564629c2560312008b" exitCode=0 Oct 01 17:01:14 crc kubenswrapper[4869]: I1001 17:01:14.173601 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" event={"ID":"a4b64f7f-0b03-4f47-965b-9fde048b735c","Type":"ContainerDied","Data":"13e59c672543e057ea5825aec1ce436345edb91e96c5ed564629c2560312008b"} Oct 01 17:01:14 crc kubenswrapper[4869]: I1001 17:01:14.173693 4869 scope.go:117] "RemoveContainer" containerID="f2358938d07dae570aa0749158f73ada6c9c893bad82912fd6adaf5ef37d3137" Oct 01 17:01:14 crc kubenswrapper[4869]: I1001 17:01:14.174465 4869 scope.go:117] "RemoveContainer" containerID="13e59c672543e057ea5825aec1ce436345edb91e96c5ed564629c2560312008b" Oct 01 17:01:14 crc kubenswrapper[4869]: E1001 17:01:14.174856 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 17:01:29 crc kubenswrapper[4869]: I1001 17:01:29.581041 4869 scope.go:117] "RemoveContainer" containerID="13e59c672543e057ea5825aec1ce436345edb91e96c5ed564629c2560312008b" Oct 01 17:01:29 crc kubenswrapper[4869]: E1001 17:01:29.582383 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 17:01:43 crc kubenswrapper[4869]: I1001 17:01:43.581721 4869 scope.go:117] "RemoveContainer" containerID="13e59c672543e057ea5825aec1ce436345edb91e96c5ed564629c2560312008b" Oct 01 17:01:43 crc kubenswrapper[4869]: E1001 17:01:43.582699 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 17:01:48 crc kubenswrapper[4869]: I1001 17:01:48.476140 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-4whvq"] Oct 
01 17:01:48 crc kubenswrapper[4869]: E1001 17:01:48.477453 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="701f9b68-20f9-4937-8541-8e2f104908ef" containerName="keystone-cron" Oct 01 17:01:48 crc kubenswrapper[4869]: I1001 17:01:48.477470 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="701f9b68-20f9-4937-8541-8e2f104908ef" containerName="keystone-cron" Oct 01 17:01:48 crc kubenswrapper[4869]: I1001 17:01:48.477711 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="701f9b68-20f9-4937-8541-8e2f104908ef" containerName="keystone-cron" Oct 01 17:01:48 crc kubenswrapper[4869]: I1001 17:01:48.479526 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4whvq" Oct 01 17:01:48 crc kubenswrapper[4869]: I1001 17:01:48.492618 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-4whvq"] Oct 01 17:01:48 crc kubenswrapper[4869]: I1001 17:01:48.527123 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/650a55d6-dff7-4ed7-a5bb-0b5b59a003da-catalog-content\") pod \"redhat-marketplace-4whvq\" (UID: \"650a55d6-dff7-4ed7-a5bb-0b5b59a003da\") " pod="openshift-marketplace/redhat-marketplace-4whvq" Oct 01 17:01:48 crc kubenswrapper[4869]: I1001 17:01:48.527380 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p4nv2\" (UniqueName: \"kubernetes.io/projected/650a55d6-dff7-4ed7-a5bb-0b5b59a003da-kube-api-access-p4nv2\") pod \"redhat-marketplace-4whvq\" (UID: \"650a55d6-dff7-4ed7-a5bb-0b5b59a003da\") " pod="openshift-marketplace/redhat-marketplace-4whvq" Oct 01 17:01:48 crc kubenswrapper[4869]: I1001 17:01:48.527446 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/650a55d6-dff7-4ed7-a5bb-0b5b59a003da-utilities\") pod \"redhat-marketplace-4whvq\" (UID: \"650a55d6-dff7-4ed7-a5bb-0b5b59a003da\") " pod="openshift-marketplace/redhat-marketplace-4whvq" Oct 01 17:01:48 crc kubenswrapper[4869]: I1001 17:01:48.628941 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/650a55d6-dff7-4ed7-a5bb-0b5b59a003da-catalog-content\") pod \"redhat-marketplace-4whvq\" (UID: \"650a55d6-dff7-4ed7-a5bb-0b5b59a003da\") " pod="openshift-marketplace/redhat-marketplace-4whvq" Oct 01 17:01:48 crc kubenswrapper[4869]: I1001 17:01:48.629080 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p4nv2\" (UniqueName: \"kubernetes.io/projected/650a55d6-dff7-4ed7-a5bb-0b5b59a003da-kube-api-access-p4nv2\") pod \"redhat-marketplace-4whvq\" (UID: \"650a55d6-dff7-4ed7-a5bb-0b5b59a003da\") " pod="openshift-marketplace/redhat-marketplace-4whvq" Oct 01 17:01:48 crc kubenswrapper[4869]: I1001 17:01:48.629128 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/650a55d6-dff7-4ed7-a5bb-0b5b59a003da-utilities\") pod \"redhat-marketplace-4whvq\" (UID: \"650a55d6-dff7-4ed7-a5bb-0b5b59a003da\") " pod="openshift-marketplace/redhat-marketplace-4whvq" Oct 01 17:01:48 crc kubenswrapper[4869]: I1001 17:01:48.629675 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/650a55d6-dff7-4ed7-a5bb-0b5b59a003da-utilities\") pod \"redhat-marketplace-4whvq\" (UID: \"650a55d6-dff7-4ed7-a5bb-0b5b59a003da\") " pod="openshift-marketplace/redhat-marketplace-4whvq" Oct 01 17:01:48 crc kubenswrapper[4869]: I1001 17:01:48.630540 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/650a55d6-dff7-4ed7-a5bb-0b5b59a003da-catalog-content\") pod \"redhat-marketplace-4whvq\" (UID: \"650a55d6-dff7-4ed7-a5bb-0b5b59a003da\") " pod="openshift-marketplace/redhat-marketplace-4whvq" Oct 01 17:01:48 crc kubenswrapper[4869]: I1001 17:01:48.661054 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p4nv2\" (UniqueName: \"kubernetes.io/projected/650a55d6-dff7-4ed7-a5bb-0b5b59a003da-kube-api-access-p4nv2\") pod \"redhat-marketplace-4whvq\" (UID: \"650a55d6-dff7-4ed7-a5bb-0b5b59a003da\") " pod="openshift-marketplace/redhat-marketplace-4whvq" Oct 01 17:01:48 crc kubenswrapper[4869]: I1001 17:01:48.810659 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4whvq" Oct 01 17:01:49 crc kubenswrapper[4869]: W1001 17:01:49.276897 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod650a55d6_dff7_4ed7_a5bb_0b5b59a003da.slice/crio-021ec8d152b7556ae8ab4c541cd8f235f677bf22660ed4a60e1e08a45eaa6785 WatchSource:0}: Error finding container 021ec8d152b7556ae8ab4c541cd8f235f677bf22660ed4a60e1e08a45eaa6785: Status 404 returned error can't find the container with id 021ec8d152b7556ae8ab4c541cd8f235f677bf22660ed4a60e1e08a45eaa6785 Oct 01 17:01:49 crc kubenswrapper[4869]: I1001 17:01:49.282869 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-4whvq"] Oct 01 17:01:49 crc kubenswrapper[4869]: I1001 17:01:49.543848 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4whvq" event={"ID":"650a55d6-dff7-4ed7-a5bb-0b5b59a003da","Type":"ContainerStarted","Data":"021ec8d152b7556ae8ab4c541cd8f235f677bf22660ed4a60e1e08a45eaa6785"} Oct 01 17:01:50 crc kubenswrapper[4869]: I1001 17:01:50.562548 4869 generic.go:334] "Generic (PLEG): container finished" podID="650a55d6-dff7-4ed7-a5bb-0b5b59a003da" containerID="0b0683732f5d6ed9526259c8617ce5d8e4400f720c47164ef8532a14cd325f9d" exitCode=0 Oct 01 17:01:50 crc kubenswrapper[4869]: I1001 17:01:50.562966 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4whvq" event={"ID":"650a55d6-dff7-4ed7-a5bb-0b5b59a003da","Type":"ContainerDied","Data":"0b0683732f5d6ed9526259c8617ce5d8e4400f720c47164ef8532a14cd325f9d"} Oct 01 17:01:52 crc kubenswrapper[4869]: I1001 17:01:52.583840 4869 generic.go:334] "Generic (PLEG): container finished" podID="650a55d6-dff7-4ed7-a5bb-0b5b59a003da" containerID="6638e2efa8d40bd31a5ada37197f2a05fa7ec5a80bd23092280e7c0526b09638" exitCode=0 Oct 01 17:01:52 crc kubenswrapper[4869]: I1001 17:01:52.584045 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4whvq" event={"ID":"650a55d6-dff7-4ed7-a5bb-0b5b59a003da","Type":"ContainerDied","Data":"6638e2efa8d40bd31a5ada37197f2a05fa7ec5a80bd23092280e7c0526b09638"} Oct 01 17:01:53 crc kubenswrapper[4869]: I1001 17:01:53.596078 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4whvq" 
event={"ID":"650a55d6-dff7-4ed7-a5bb-0b5b59a003da","Type":"ContainerStarted","Data":"a671614b3c49d760619a55ff06f94c4d575f4ae150293e860ec432c967ef749b"} Oct 01 17:01:53 crc kubenswrapper[4869]: I1001 17:01:53.620456 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-4whvq" podStartSLOduration=3.162432684 podStartE2EDuration="5.62043438s" podCreationTimestamp="2025-10-01 17:01:48 +0000 UTC" firstStartedPulling="2025-10-01 17:01:50.566712635 +0000 UTC m=+7019.713555751" lastFinishedPulling="2025-10-01 17:01:53.024714331 +0000 UTC m=+7022.171557447" observedRunningTime="2025-10-01 17:01:53.615294649 +0000 UTC m=+7022.762137775" watchObservedRunningTime="2025-10-01 17:01:53.62043438 +0000 UTC m=+7022.767277516" Oct 01 17:01:54 crc kubenswrapper[4869]: I1001 17:01:54.616430 4869 generic.go:334] "Generic (PLEG): container finished" podID="78543bf0-aa4b-45dc-a7c6-37a22a5be6ea" containerID="13f0c91a6a4b872edb42923b131243ff49ea806efc1b2cdc8e35bb8a45f85322" exitCode=0 Oct 01 17:01:54 crc kubenswrapper[4869]: I1001 17:01:54.616530 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest-s00-full" event={"ID":"78543bf0-aa4b-45dc-a7c6-37a22a5be6ea","Type":"ContainerDied","Data":"13f0c91a6a4b872edb42923b131243ff49ea806efc1b2cdc8e35bb8a45f85322"} Oct 01 17:01:55 crc kubenswrapper[4869]: I1001 17:01:55.581801 4869 scope.go:117] "RemoveContainer" containerID="13e59c672543e057ea5825aec1ce436345edb91e96c5ed564629c2560312008b" Oct 01 17:01:55 crc kubenswrapper[4869]: E1001 17:01:55.582080 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 17:01:56 crc kubenswrapper[4869]: I1001 17:01:56.341181 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/tempest-tests-tempest-s00-full" Oct 01 17:01:56 crc kubenswrapper[4869]: I1001 17:01:56.449786 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-logs\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"78543bf0-aa4b-45dc-a7c6-37a22a5be6ea\" (UID: \"78543bf0-aa4b-45dc-a7c6-37a22a5be6ea\") " Oct 01 17:01:56 crc kubenswrapper[4869]: I1001 17:01:56.449856 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/78543bf0-aa4b-45dc-a7c6-37a22a5be6ea-ceph\") pod \"78543bf0-aa4b-45dc-a7c6-37a22a5be6ea\" (UID: \"78543bf0-aa4b-45dc-a7c6-37a22a5be6ea\") " Oct 01 17:01:56 crc kubenswrapper[4869]: I1001 17:01:56.449977 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/78543bf0-aa4b-45dc-a7c6-37a22a5be6ea-openstack-config-secret\") pod \"78543bf0-aa4b-45dc-a7c6-37a22a5be6ea\" (UID: \"78543bf0-aa4b-45dc-a7c6-37a22a5be6ea\") " Oct 01 17:01:56 crc kubenswrapper[4869]: I1001 17:01:56.450042 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/78543bf0-aa4b-45dc-a7c6-37a22a5be6ea-test-operator-ephemeral-temporary\") pod \"78543bf0-aa4b-45dc-a7c6-37a22a5be6ea\" (UID: \"78543bf0-aa4b-45dc-a7c6-37a22a5be6ea\") " Oct 01 17:01:56 crc kubenswrapper[4869]: I1001 17:01:56.450120 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/78543bf0-aa4b-45dc-a7c6-37a22a5be6ea-test-operator-ephemeral-workdir\") pod \"78543bf0-aa4b-45dc-a7c6-37a22a5be6ea\" (UID: \"78543bf0-aa4b-45dc-a7c6-37a22a5be6ea\") " Oct 01 17:01:56 crc kubenswrapper[4869]: I1001 17:01:56.450920 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/78543bf0-aa4b-45dc-a7c6-37a22a5be6ea-test-operator-ephemeral-temporary" (OuterVolumeSpecName: "test-operator-ephemeral-temporary") pod "78543bf0-aa4b-45dc-a7c6-37a22a5be6ea" (UID: "78543bf0-aa4b-45dc-a7c6-37a22a5be6ea"). InnerVolumeSpecName "test-operator-ephemeral-temporary". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 17:01:56 crc kubenswrapper[4869]: I1001 17:01:56.451032 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/78543bf0-aa4b-45dc-a7c6-37a22a5be6ea-ca-certs\") pod \"78543bf0-aa4b-45dc-a7c6-37a22a5be6ea\" (UID: \"78543bf0-aa4b-45dc-a7c6-37a22a5be6ea\") " Oct 01 17:01:56 crc kubenswrapper[4869]: I1001 17:01:56.451386 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/78543bf0-aa4b-45dc-a7c6-37a22a5be6ea-ssh-key\") pod \"78543bf0-aa4b-45dc-a7c6-37a22a5be6ea\" (UID: \"78543bf0-aa4b-45dc-a7c6-37a22a5be6ea\") " Oct 01 17:01:56 crc kubenswrapper[4869]: I1001 17:01:56.451426 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/78543bf0-aa4b-45dc-a7c6-37a22a5be6ea-openstack-config\") pod \"78543bf0-aa4b-45dc-a7c6-37a22a5be6ea\" (UID: \"78543bf0-aa4b-45dc-a7c6-37a22a5be6ea\") " Oct 01 17:01:56 crc kubenswrapper[4869]: I1001 17:01:56.451448 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pmlps\" (UniqueName: \"kubernetes.io/projected/78543bf0-aa4b-45dc-a7c6-37a22a5be6ea-kube-api-access-pmlps\") pod \"78543bf0-aa4b-45dc-a7c6-37a22a5be6ea\" (UID: \"78543bf0-aa4b-45dc-a7c6-37a22a5be6ea\") " Oct 01 17:01:56 crc kubenswrapper[4869]: I1001 17:01:56.451471 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/78543bf0-aa4b-45dc-a7c6-37a22a5be6ea-config-data\") pod \"78543bf0-aa4b-45dc-a7c6-37a22a5be6ea\" (UID: \"78543bf0-aa4b-45dc-a7c6-37a22a5be6ea\") " Oct 01 17:01:56 crc kubenswrapper[4869]: I1001 17:01:56.452077 4869 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/78543bf0-aa4b-45dc-a7c6-37a22a5be6ea-test-operator-ephemeral-temporary\") on node \"crc\" DevicePath \"\"" Oct 01 17:01:56 crc kubenswrapper[4869]: I1001 17:01:56.456356 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/tempest-tests-tempest-s01-single-test"] Oct 01 17:01:56 crc kubenswrapper[4869]: I1001 17:01:56.456847 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage08-crc" (OuterVolumeSpecName: "test-operator-logs") pod "78543bf0-aa4b-45dc-a7c6-37a22a5be6ea" (UID: "78543bf0-aa4b-45dc-a7c6-37a22a5be6ea"). InnerVolumeSpecName "local-storage08-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 01 17:01:56 crc kubenswrapper[4869]: E1001 17:01:56.456909 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78543bf0-aa4b-45dc-a7c6-37a22a5be6ea" containerName="tempest-tests-tempest-tests-runner" Oct 01 17:01:56 crc kubenswrapper[4869]: I1001 17:01:56.456928 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="78543bf0-aa4b-45dc-a7c6-37a22a5be6ea" containerName="tempest-tests-tempest-tests-runner" Oct 01 17:01:56 crc kubenswrapper[4869]: I1001 17:01:56.457070 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/78543bf0-aa4b-45dc-a7c6-37a22a5be6ea-config-data" (OuterVolumeSpecName: "config-data") pod "78543bf0-aa4b-45dc-a7c6-37a22a5be6ea" (UID: "78543bf0-aa4b-45dc-a7c6-37a22a5be6ea"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 17:01:56 crc kubenswrapper[4869]: I1001 17:01:56.457138 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="78543bf0-aa4b-45dc-a7c6-37a22a5be6ea" containerName="tempest-tests-tempest-tests-runner" Oct 01 17:01:56 crc kubenswrapper[4869]: I1001 17:01:56.457998 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest-s01-single-test" Oct 01 17:01:56 crc kubenswrapper[4869]: I1001 17:01:56.462444 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/78543bf0-aa4b-45dc-a7c6-37a22a5be6ea-ceph" (OuterVolumeSpecName: "ceph") pod "78543bf0-aa4b-45dc-a7c6-37a22a5be6ea" (UID: "78543bf0-aa4b-45dc-a7c6-37a22a5be6ea"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 17:01:56 crc kubenswrapper[4869]: I1001 17:01:56.463737 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-custom-data-s1" Oct 01 17:01:56 crc kubenswrapper[4869]: I1001 17:01:56.464753 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s1" Oct 01 17:01:56 crc kubenswrapper[4869]: I1001 17:01:56.475056 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/78543bf0-aa4b-45dc-a7c6-37a22a5be6ea-test-operator-ephemeral-workdir" (OuterVolumeSpecName: "test-operator-ephemeral-workdir") pod "78543bf0-aa4b-45dc-a7c6-37a22a5be6ea" (UID: "78543bf0-aa4b-45dc-a7c6-37a22a5be6ea"). InnerVolumeSpecName "test-operator-ephemeral-workdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 17:01:56 crc kubenswrapper[4869]: I1001 17:01:56.482980 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest-s01-single-test"] Oct 01 17:01:56 crc kubenswrapper[4869]: I1001 17:01:56.483233 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/78543bf0-aa4b-45dc-a7c6-37a22a5be6ea-kube-api-access-pmlps" (OuterVolumeSpecName: "kube-api-access-pmlps") pod "78543bf0-aa4b-45dc-a7c6-37a22a5be6ea" (UID: "78543bf0-aa4b-45dc-a7c6-37a22a5be6ea"). InnerVolumeSpecName "kube-api-access-pmlps". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 17:01:56 crc kubenswrapper[4869]: I1001 17:01:56.495027 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/78543bf0-aa4b-45dc-a7c6-37a22a5be6ea-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "78543bf0-aa4b-45dc-a7c6-37a22a5be6ea" (UID: "78543bf0-aa4b-45dc-a7c6-37a22a5be6ea"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 17:01:56 crc kubenswrapper[4869]: I1001 17:01:56.495578 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/78543bf0-aa4b-45dc-a7c6-37a22a5be6ea-ca-certs" (OuterVolumeSpecName: "ca-certs") pod "78543bf0-aa4b-45dc-a7c6-37a22a5be6ea" (UID: "78543bf0-aa4b-45dc-a7c6-37a22a5be6ea"). InnerVolumeSpecName "ca-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 17:01:56 crc kubenswrapper[4869]: I1001 17:01:56.504847 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/78543bf0-aa4b-45dc-a7c6-37a22a5be6ea-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "78543bf0-aa4b-45dc-a7c6-37a22a5be6ea" (UID: "78543bf0-aa4b-45dc-a7c6-37a22a5be6ea"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 17:01:56 crc kubenswrapper[4869]: I1001 17:01:56.524145 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/78543bf0-aa4b-45dc-a7c6-37a22a5be6ea-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "78543bf0-aa4b-45dc-a7c6-37a22a5be6ea" (UID: "78543bf0-aa4b-45dc-a7c6-37a22a5be6ea"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 17:01:56 crc kubenswrapper[4869]: I1001 17:01:56.553853 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/26dfe6ee-15f8-424e-b16a-58a57d5bc4f8-openstack-config-secret\") pod \"tempest-tests-tempest-s01-single-test\" (UID: \"26dfe6ee-15f8-424e-b16a-58a57d5bc4f8\") " pod="openstack/tempest-tests-tempest-s01-single-test" Oct 01 17:01:56 crc kubenswrapper[4869]: I1001 17:01:56.553909 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/26dfe6ee-15f8-424e-b16a-58a57d5bc4f8-config-data\") pod \"tempest-tests-tempest-s01-single-test\" (UID: \"26dfe6ee-15f8-424e-b16a-58a57d5bc4f8\") " pod="openstack/tempest-tests-tempest-s01-single-test" Oct 01 17:01:56 crc kubenswrapper[4869]: I1001 17:01:56.554010 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/26dfe6ee-15f8-424e-b16a-58a57d5bc4f8-openstack-config\") pod \"tempest-tests-tempest-s01-single-test\" (UID: \"26dfe6ee-15f8-424e-b16a-58a57d5bc4f8\") " pod="openstack/tempest-tests-tempest-s01-single-test" Oct 01 17:01:56 crc kubenswrapper[4869]: I1001 17:01:56.554214 4869 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/78543bf0-aa4b-45dc-a7c6-37a22a5be6ea-test-operator-ephemeral-workdir\") on node \"crc\" DevicePath \"\"" Oct 01 17:01:56 crc kubenswrapper[4869]: I1001 17:01:56.554238 4869 reconciler_common.go:293] "Volume detached for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/78543bf0-aa4b-45dc-a7c6-37a22a5be6ea-ca-certs\") on node \"crc\" DevicePath \"\"" Oct 01 17:01:56 crc kubenswrapper[4869]: I1001 17:01:56.554303 4869 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/78543bf0-aa4b-45dc-a7c6-37a22a5be6ea-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 01 17:01:56 crc kubenswrapper[4869]: I1001 17:01:56.554322 4869 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/78543bf0-aa4b-45dc-a7c6-37a22a5be6ea-openstack-config\") on node \"crc\" DevicePath \"\"" Oct 01 17:01:56 crc kubenswrapper[4869]: I1001 17:01:56.554337 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pmlps\" (UniqueName: 
\"kubernetes.io/projected/78543bf0-aa4b-45dc-a7c6-37a22a5be6ea-kube-api-access-pmlps\") on node \"crc\" DevicePath \"\"" Oct 01 17:01:56 crc kubenswrapper[4869]: I1001 17:01:56.554349 4869 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/78543bf0-aa4b-45dc-a7c6-37a22a5be6ea-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 17:01:56 crc kubenswrapper[4869]: I1001 17:01:56.554410 4869 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" " Oct 01 17:01:56 crc kubenswrapper[4869]: I1001 17:01:56.554425 4869 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/78543bf0-aa4b-45dc-a7c6-37a22a5be6ea-ceph\") on node \"crc\" DevicePath \"\"" Oct 01 17:01:56 crc kubenswrapper[4869]: I1001 17:01:56.554437 4869 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/78543bf0-aa4b-45dc-a7c6-37a22a5be6ea-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Oct 01 17:01:56 crc kubenswrapper[4869]: I1001 17:01:56.578291 4869 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage08-crc" (UniqueName: "kubernetes.io/local-volume/local-storage08-crc") on node "crc" Oct 01 17:01:56 crc kubenswrapper[4869]: I1001 17:01:56.633119 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest-s00-full" event={"ID":"78543bf0-aa4b-45dc-a7c6-37a22a5be6ea","Type":"ContainerDied","Data":"197b13cffaa032b4a9a053024f120c1da210f5edbf910d57274ab1206a907282"} Oct 01 17:01:56 crc kubenswrapper[4869]: I1001 17:01:56.633165 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="197b13cffaa032b4a9a053024f120c1da210f5edbf910d57274ab1206a907282" Oct 01 17:01:56 crc kubenswrapper[4869]: I1001 17:01:56.633178 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/tempest-tests-tempest-s00-full" Oct 01 17:01:56 crc kubenswrapper[4869]: I1001 17:01:56.656080 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/26dfe6ee-15f8-424e-b16a-58a57d5bc4f8-openstack-config\") pod \"tempest-tests-tempest-s01-single-test\" (UID: \"26dfe6ee-15f8-424e-b16a-58a57d5bc4f8\") " pod="openstack/tempest-tests-tempest-s01-single-test" Oct 01 17:01:56 crc kubenswrapper[4869]: I1001 17:01:56.656125 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xtmq6\" (UniqueName: \"kubernetes.io/projected/26dfe6ee-15f8-424e-b16a-58a57d5bc4f8-kube-api-access-xtmq6\") pod \"tempest-tests-tempest-s01-single-test\" (UID: \"26dfe6ee-15f8-424e-b16a-58a57d5bc4f8\") " pod="openstack/tempest-tests-tempest-s01-single-test" Oct 01 17:01:56 crc kubenswrapper[4869]: I1001 17:01:56.656170 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/26dfe6ee-15f8-424e-b16a-58a57d5bc4f8-ca-certs\") pod \"tempest-tests-tempest-s01-single-test\" (UID: \"26dfe6ee-15f8-424e-b16a-58a57d5bc4f8\") " pod="openstack/tempest-tests-tempest-s01-single-test" Oct 01 17:01:56 crc kubenswrapper[4869]: I1001 17:01:56.656247 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/26dfe6ee-15f8-424e-b16a-58a57d5bc4f8-ceph\") pod \"tempest-tests-tempest-s01-single-test\" (UID: \"26dfe6ee-15f8-424e-b16a-58a57d5bc4f8\") " pod="openstack/tempest-tests-tempest-s01-single-test" Oct 01 17:01:56 crc kubenswrapper[4869]: I1001 17:01:56.656332 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"tempest-tests-tempest-s01-single-test\" (UID: \"26dfe6ee-15f8-424e-b16a-58a57d5bc4f8\") " pod="openstack/tempest-tests-tempest-s01-single-test" Oct 01 17:01:56 crc kubenswrapper[4869]: I1001 17:01:56.656376 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/26dfe6ee-15f8-424e-b16a-58a57d5bc4f8-openstack-config-secret\") pod \"tempest-tests-tempest-s01-single-test\" (UID: \"26dfe6ee-15f8-424e-b16a-58a57d5bc4f8\") " pod="openstack/tempest-tests-tempest-s01-single-test" Oct 01 17:01:56 crc kubenswrapper[4869]: I1001 17:01:56.656410 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/26dfe6ee-15f8-424e-b16a-58a57d5bc4f8-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest-s01-single-test\" (UID: \"26dfe6ee-15f8-424e-b16a-58a57d5bc4f8\") " pod="openstack/tempest-tests-tempest-s01-single-test" Oct 01 17:01:56 crc kubenswrapper[4869]: I1001 17:01:56.656447 4869 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"tempest-tests-tempest-s01-single-test\" (UID: \"26dfe6ee-15f8-424e-b16a-58a57d5bc4f8\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/tempest-tests-tempest-s01-single-test" Oct 01 17:01:56 crc kubenswrapper[4869]: I1001 17:01:56.656440 4869 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/26dfe6ee-15f8-424e-b16a-58a57d5bc4f8-config-data\") pod \"tempest-tests-tempest-s01-single-test\" (UID: \"26dfe6ee-15f8-424e-b16a-58a57d5bc4f8\") " pod="openstack/tempest-tests-tempest-s01-single-test" Oct 01 17:01:56 crc kubenswrapper[4869]: I1001 17:01:56.656998 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/26dfe6ee-15f8-424e-b16a-58a57d5bc4f8-openstack-config\") pod \"tempest-tests-tempest-s01-single-test\" (UID: \"26dfe6ee-15f8-424e-b16a-58a57d5bc4f8\") " pod="openstack/tempest-tests-tempest-s01-single-test" Oct 01 17:01:56 crc kubenswrapper[4869]: I1001 17:01:56.657571 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/26dfe6ee-15f8-424e-b16a-58a57d5bc4f8-config-data\") pod \"tempest-tests-tempest-s01-single-test\" (UID: \"26dfe6ee-15f8-424e-b16a-58a57d5bc4f8\") " pod="openstack/tempest-tests-tempest-s01-single-test" Oct 01 17:01:56 crc kubenswrapper[4869]: I1001 17:01:56.657844 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/26dfe6ee-15f8-424e-b16a-58a57d5bc4f8-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest-s01-single-test\" (UID: \"26dfe6ee-15f8-424e-b16a-58a57d5bc4f8\") " pod="openstack/tempest-tests-tempest-s01-single-test" Oct 01 17:01:56 crc kubenswrapper[4869]: I1001 17:01:56.657993 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/26dfe6ee-15f8-424e-b16a-58a57d5bc4f8-ssh-key\") pod \"tempest-tests-tempest-s01-single-test\" (UID: \"26dfe6ee-15f8-424e-b16a-58a57d5bc4f8\") " pod="openstack/tempest-tests-tempest-s01-single-test" Oct 01 17:01:56 crc kubenswrapper[4869]: I1001 17:01:56.660948 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/26dfe6ee-15f8-424e-b16a-58a57d5bc4f8-openstack-config-secret\") pod \"tempest-tests-tempest-s01-single-test\" (UID: \"26dfe6ee-15f8-424e-b16a-58a57d5bc4f8\") " pod="openstack/tempest-tests-tempest-s01-single-test" Oct 01 17:01:56 crc kubenswrapper[4869]: I1001 17:01:56.681450 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"tempest-tests-tempest-s01-single-test\" (UID: \"26dfe6ee-15f8-424e-b16a-58a57d5bc4f8\") " pod="openstack/tempest-tests-tempest-s01-single-test" Oct 01 17:01:56 crc kubenswrapper[4869]: I1001 17:01:56.760505 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/26dfe6ee-15f8-424e-b16a-58a57d5bc4f8-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest-s01-single-test\" (UID: \"26dfe6ee-15f8-424e-b16a-58a57d5bc4f8\") " pod="openstack/tempest-tests-tempest-s01-single-test" Oct 01 17:01:56 crc kubenswrapper[4869]: I1001 17:01:56.760621 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/26dfe6ee-15f8-424e-b16a-58a57d5bc4f8-ssh-key\") pod \"tempest-tests-tempest-s01-single-test\" (UID: \"26dfe6ee-15f8-424e-b16a-58a57d5bc4f8\") " 
pod="openstack/tempest-tests-tempest-s01-single-test" Oct 01 17:01:56 crc kubenswrapper[4869]: I1001 17:01:56.760792 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xtmq6\" (UniqueName: \"kubernetes.io/projected/26dfe6ee-15f8-424e-b16a-58a57d5bc4f8-kube-api-access-xtmq6\") pod \"tempest-tests-tempest-s01-single-test\" (UID: \"26dfe6ee-15f8-424e-b16a-58a57d5bc4f8\") " pod="openstack/tempest-tests-tempest-s01-single-test" Oct 01 17:01:56 crc kubenswrapper[4869]: I1001 17:01:56.760890 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/26dfe6ee-15f8-424e-b16a-58a57d5bc4f8-ca-certs\") pod \"tempest-tests-tempest-s01-single-test\" (UID: \"26dfe6ee-15f8-424e-b16a-58a57d5bc4f8\") " pod="openstack/tempest-tests-tempest-s01-single-test" Oct 01 17:01:56 crc kubenswrapper[4869]: I1001 17:01:56.761107 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/26dfe6ee-15f8-424e-b16a-58a57d5bc4f8-ceph\") pod \"tempest-tests-tempest-s01-single-test\" (UID: \"26dfe6ee-15f8-424e-b16a-58a57d5bc4f8\") " pod="openstack/tempest-tests-tempest-s01-single-test" Oct 01 17:01:56 crc kubenswrapper[4869]: I1001 17:01:56.761383 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/26dfe6ee-15f8-424e-b16a-58a57d5bc4f8-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest-s01-single-test\" (UID: \"26dfe6ee-15f8-424e-b16a-58a57d5bc4f8\") " pod="openstack/tempest-tests-tempest-s01-single-test" Oct 01 17:01:56 crc kubenswrapper[4869]: I1001 17:01:56.764290 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/26dfe6ee-15f8-424e-b16a-58a57d5bc4f8-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest-s01-single-test\" (UID: \"26dfe6ee-15f8-424e-b16a-58a57d5bc4f8\") " pod="openstack/tempest-tests-tempest-s01-single-test" Oct 01 17:01:56 crc kubenswrapper[4869]: I1001 17:01:56.768206 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/26dfe6ee-15f8-424e-b16a-58a57d5bc4f8-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest-s01-single-test\" (UID: \"26dfe6ee-15f8-424e-b16a-58a57d5bc4f8\") " pod="openstack/tempest-tests-tempest-s01-single-test" Oct 01 17:01:56 crc kubenswrapper[4869]: I1001 17:01:56.768535 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/26dfe6ee-15f8-424e-b16a-58a57d5bc4f8-ssh-key\") pod \"tempest-tests-tempest-s01-single-test\" (UID: \"26dfe6ee-15f8-424e-b16a-58a57d5bc4f8\") " pod="openstack/tempest-tests-tempest-s01-single-test" Oct 01 17:01:56 crc kubenswrapper[4869]: I1001 17:01:56.770043 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/26dfe6ee-15f8-424e-b16a-58a57d5bc4f8-ceph\") pod \"tempest-tests-tempest-s01-single-test\" (UID: \"26dfe6ee-15f8-424e-b16a-58a57d5bc4f8\") " pod="openstack/tempest-tests-tempest-s01-single-test" Oct 01 17:01:56 crc kubenswrapper[4869]: I1001 17:01:56.770881 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-certs\" (UniqueName: 
\"kubernetes.io/secret/26dfe6ee-15f8-424e-b16a-58a57d5bc4f8-ca-certs\") pod \"tempest-tests-tempest-s01-single-test\" (UID: \"26dfe6ee-15f8-424e-b16a-58a57d5bc4f8\") " pod="openstack/tempest-tests-tempest-s01-single-test" Oct 01 17:01:56 crc kubenswrapper[4869]: I1001 17:01:56.786978 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xtmq6\" (UniqueName: \"kubernetes.io/projected/26dfe6ee-15f8-424e-b16a-58a57d5bc4f8-kube-api-access-xtmq6\") pod \"tempest-tests-tempest-s01-single-test\" (UID: \"26dfe6ee-15f8-424e-b16a-58a57d5bc4f8\") " pod="openstack/tempest-tests-tempest-s01-single-test" Oct 01 17:01:56 crc kubenswrapper[4869]: I1001 17:01:56.914637 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest-s01-single-test" Oct 01 17:01:57 crc kubenswrapper[4869]: I1001 17:01:57.443061 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest-s01-single-test"] Oct 01 17:01:57 crc kubenswrapper[4869]: W1001 17:01:57.444734 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod26dfe6ee_15f8_424e_b16a_58a57d5bc4f8.slice/crio-efb7598238f207a387b1aa777204526ce69ed87496891675e347e18a0df3c7a4 WatchSource:0}: Error finding container efb7598238f207a387b1aa777204526ce69ed87496891675e347e18a0df3c7a4: Status 404 returned error can't find the container with id efb7598238f207a387b1aa777204526ce69ed87496891675e347e18a0df3c7a4 Oct 01 17:01:57 crc kubenswrapper[4869]: I1001 17:01:57.646207 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest-s01-single-test" event={"ID":"26dfe6ee-15f8-424e-b16a-58a57d5bc4f8","Type":"ContainerStarted","Data":"efb7598238f207a387b1aa777204526ce69ed87496891675e347e18a0df3c7a4"} Oct 01 17:01:58 crc kubenswrapper[4869]: I1001 17:01:58.657937 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest-s01-single-test" event={"ID":"26dfe6ee-15f8-424e-b16a-58a57d5bc4f8","Type":"ContainerStarted","Data":"5f85bf629024a8ef122fc1e9369d39c02c290b936c3f52763ba7c028811f55a2"} Oct 01 17:01:58 crc kubenswrapper[4869]: I1001 17:01:58.682245 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/tempest-tests-tempest-s01-single-test" podStartSLOduration=2.682220486 podStartE2EDuration="2.682220486s" podCreationTimestamp="2025-10-01 17:01:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 17:01:58.678073821 +0000 UTC m=+7027.824916947" watchObservedRunningTime="2025-10-01 17:01:58.682220486 +0000 UTC m=+7027.829063602" Oct 01 17:01:58 crc kubenswrapper[4869]: I1001 17:01:58.810985 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-4whvq" Oct 01 17:01:58 crc kubenswrapper[4869]: I1001 17:01:58.811415 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-4whvq" Oct 01 17:01:58 crc kubenswrapper[4869]: I1001 17:01:58.870179 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-4whvq" Oct 01 17:01:59 crc kubenswrapper[4869]: I1001 17:01:59.734273 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-4whvq" Oct 01 17:01:59 crc 
kubenswrapper[4869]: I1001 17:01:59.797966 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-4whvq"] Oct 01 17:02:01 crc kubenswrapper[4869]: I1001 17:02:01.689129 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-4whvq" podUID="650a55d6-dff7-4ed7-a5bb-0b5b59a003da" containerName="registry-server" containerID="cri-o://a671614b3c49d760619a55ff06f94c4d575f4ae150293e860ec432c967ef749b" gracePeriod=2 Oct 01 17:02:02 crc kubenswrapper[4869]: I1001 17:02:02.192401 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4whvq" Oct 01 17:02:02 crc kubenswrapper[4869]: I1001 17:02:02.299095 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p4nv2\" (UniqueName: \"kubernetes.io/projected/650a55d6-dff7-4ed7-a5bb-0b5b59a003da-kube-api-access-p4nv2\") pod \"650a55d6-dff7-4ed7-a5bb-0b5b59a003da\" (UID: \"650a55d6-dff7-4ed7-a5bb-0b5b59a003da\") " Oct 01 17:02:02 crc kubenswrapper[4869]: I1001 17:02:02.299151 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/650a55d6-dff7-4ed7-a5bb-0b5b59a003da-catalog-content\") pod \"650a55d6-dff7-4ed7-a5bb-0b5b59a003da\" (UID: \"650a55d6-dff7-4ed7-a5bb-0b5b59a003da\") " Oct 01 17:02:02 crc kubenswrapper[4869]: I1001 17:02:02.299244 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/650a55d6-dff7-4ed7-a5bb-0b5b59a003da-utilities\") pod \"650a55d6-dff7-4ed7-a5bb-0b5b59a003da\" (UID: \"650a55d6-dff7-4ed7-a5bb-0b5b59a003da\") " Oct 01 17:02:02 crc kubenswrapper[4869]: I1001 17:02:02.300306 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/650a55d6-dff7-4ed7-a5bb-0b5b59a003da-utilities" (OuterVolumeSpecName: "utilities") pod "650a55d6-dff7-4ed7-a5bb-0b5b59a003da" (UID: "650a55d6-dff7-4ed7-a5bb-0b5b59a003da"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 17:02:02 crc kubenswrapper[4869]: I1001 17:02:02.307356 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/650a55d6-dff7-4ed7-a5bb-0b5b59a003da-kube-api-access-p4nv2" (OuterVolumeSpecName: "kube-api-access-p4nv2") pod "650a55d6-dff7-4ed7-a5bb-0b5b59a003da" (UID: "650a55d6-dff7-4ed7-a5bb-0b5b59a003da"). InnerVolumeSpecName "kube-api-access-p4nv2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 17:02:02 crc kubenswrapper[4869]: I1001 17:02:02.322661 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/650a55d6-dff7-4ed7-a5bb-0b5b59a003da-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "650a55d6-dff7-4ed7-a5bb-0b5b59a003da" (UID: "650a55d6-dff7-4ed7-a5bb-0b5b59a003da"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 17:02:02 crc kubenswrapper[4869]: I1001 17:02:02.402494 4869 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/650a55d6-dff7-4ed7-a5bb-0b5b59a003da-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 17:02:02 crc kubenswrapper[4869]: I1001 17:02:02.402535 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p4nv2\" (UniqueName: \"kubernetes.io/projected/650a55d6-dff7-4ed7-a5bb-0b5b59a003da-kube-api-access-p4nv2\") on node \"crc\" DevicePath \"\"" Oct 01 17:02:02 crc kubenswrapper[4869]: I1001 17:02:02.402549 4869 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/650a55d6-dff7-4ed7-a5bb-0b5b59a003da-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 17:02:02 crc kubenswrapper[4869]: I1001 17:02:02.699645 4869 generic.go:334] "Generic (PLEG): container finished" podID="650a55d6-dff7-4ed7-a5bb-0b5b59a003da" containerID="a671614b3c49d760619a55ff06f94c4d575f4ae150293e860ec432c967ef749b" exitCode=0 Oct 01 17:02:02 crc kubenswrapper[4869]: I1001 17:02:02.699690 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4whvq" event={"ID":"650a55d6-dff7-4ed7-a5bb-0b5b59a003da","Type":"ContainerDied","Data":"a671614b3c49d760619a55ff06f94c4d575f4ae150293e860ec432c967ef749b"} Oct 01 17:02:02 crc kubenswrapper[4869]: I1001 17:02:02.699719 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4whvq" event={"ID":"650a55d6-dff7-4ed7-a5bb-0b5b59a003da","Type":"ContainerDied","Data":"021ec8d152b7556ae8ab4c541cd8f235f677bf22660ed4a60e1e08a45eaa6785"} Oct 01 17:02:02 crc kubenswrapper[4869]: I1001 17:02:02.699734 4869 scope.go:117] "RemoveContainer" containerID="a671614b3c49d760619a55ff06f94c4d575f4ae150293e860ec432c967ef749b" Oct 01 17:02:02 crc kubenswrapper[4869]: I1001 17:02:02.699771 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4whvq" Oct 01 17:02:02 crc kubenswrapper[4869]: I1001 17:02:02.725583 4869 scope.go:117] "RemoveContainer" containerID="6638e2efa8d40bd31a5ada37197f2a05fa7ec5a80bd23092280e7c0526b09638" Oct 01 17:02:02 crc kubenswrapper[4869]: I1001 17:02:02.753313 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-4whvq"] Oct 01 17:02:02 crc kubenswrapper[4869]: I1001 17:02:02.772894 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-4whvq"] Oct 01 17:02:02 crc kubenswrapper[4869]: I1001 17:02:02.777820 4869 scope.go:117] "RemoveContainer" containerID="0b0683732f5d6ed9526259c8617ce5d8e4400f720c47164ef8532a14cd325f9d" Oct 01 17:02:02 crc kubenswrapper[4869]: I1001 17:02:02.828301 4869 scope.go:117] "RemoveContainer" containerID="a671614b3c49d760619a55ff06f94c4d575f4ae150293e860ec432c967ef749b" Oct 01 17:02:02 crc kubenswrapper[4869]: E1001 17:02:02.830822 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a671614b3c49d760619a55ff06f94c4d575f4ae150293e860ec432c967ef749b\": container with ID starting with a671614b3c49d760619a55ff06f94c4d575f4ae150293e860ec432c967ef749b not found: ID does not exist" containerID="a671614b3c49d760619a55ff06f94c4d575f4ae150293e860ec432c967ef749b" Oct 01 17:02:02 crc kubenswrapper[4869]: I1001 17:02:02.830939 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a671614b3c49d760619a55ff06f94c4d575f4ae150293e860ec432c967ef749b"} err="failed to get container status \"a671614b3c49d760619a55ff06f94c4d575f4ae150293e860ec432c967ef749b\": rpc error: code = NotFound desc = could not find container \"a671614b3c49d760619a55ff06f94c4d575f4ae150293e860ec432c967ef749b\": container with ID starting with a671614b3c49d760619a55ff06f94c4d575f4ae150293e860ec432c967ef749b not found: ID does not exist" Oct 01 17:02:02 crc kubenswrapper[4869]: I1001 17:02:02.831016 4869 scope.go:117] "RemoveContainer" containerID="6638e2efa8d40bd31a5ada37197f2a05fa7ec5a80bd23092280e7c0526b09638" Oct 01 17:02:02 crc kubenswrapper[4869]: E1001 17:02:02.831642 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6638e2efa8d40bd31a5ada37197f2a05fa7ec5a80bd23092280e7c0526b09638\": container with ID starting with 6638e2efa8d40bd31a5ada37197f2a05fa7ec5a80bd23092280e7c0526b09638 not found: ID does not exist" containerID="6638e2efa8d40bd31a5ada37197f2a05fa7ec5a80bd23092280e7c0526b09638" Oct 01 17:02:02 crc kubenswrapper[4869]: I1001 17:02:02.831724 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6638e2efa8d40bd31a5ada37197f2a05fa7ec5a80bd23092280e7c0526b09638"} err="failed to get container status \"6638e2efa8d40bd31a5ada37197f2a05fa7ec5a80bd23092280e7c0526b09638\": rpc error: code = NotFound desc = could not find container \"6638e2efa8d40bd31a5ada37197f2a05fa7ec5a80bd23092280e7c0526b09638\": container with ID starting with 6638e2efa8d40bd31a5ada37197f2a05fa7ec5a80bd23092280e7c0526b09638 not found: ID does not exist" Oct 01 17:02:02 crc kubenswrapper[4869]: I1001 17:02:02.831784 4869 scope.go:117] "RemoveContainer" containerID="0b0683732f5d6ed9526259c8617ce5d8e4400f720c47164ef8532a14cd325f9d" Oct 01 17:02:02 crc kubenswrapper[4869]: E1001 17:02:02.832281 4869 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"0b0683732f5d6ed9526259c8617ce5d8e4400f720c47164ef8532a14cd325f9d\": container with ID starting with 0b0683732f5d6ed9526259c8617ce5d8e4400f720c47164ef8532a14cd325f9d not found: ID does not exist" containerID="0b0683732f5d6ed9526259c8617ce5d8e4400f720c47164ef8532a14cd325f9d" Oct 01 17:02:02 crc kubenswrapper[4869]: I1001 17:02:02.832355 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0b0683732f5d6ed9526259c8617ce5d8e4400f720c47164ef8532a14cd325f9d"} err="failed to get container status \"0b0683732f5d6ed9526259c8617ce5d8e4400f720c47164ef8532a14cd325f9d\": rpc error: code = NotFound desc = could not find container \"0b0683732f5d6ed9526259c8617ce5d8e4400f720c47164ef8532a14cd325f9d\": container with ID starting with 0b0683732f5d6ed9526259c8617ce5d8e4400f720c47164ef8532a14cd325f9d not found: ID does not exist" Oct 01 17:02:03 crc kubenswrapper[4869]: I1001 17:02:03.599866 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="650a55d6-dff7-4ed7-a5bb-0b5b59a003da" path="/var/lib/kubelet/pods/650a55d6-dff7-4ed7-a5bb-0b5b59a003da/volumes" Oct 01 17:02:04 crc kubenswrapper[4869]: I1001 17:02:04.522322 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-hc2qh"] Oct 01 17:02:04 crc kubenswrapper[4869]: E1001 17:02:04.531705 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="650a55d6-dff7-4ed7-a5bb-0b5b59a003da" containerName="extract-utilities" Oct 01 17:02:04 crc kubenswrapper[4869]: I1001 17:02:04.531752 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="650a55d6-dff7-4ed7-a5bb-0b5b59a003da" containerName="extract-utilities" Oct 01 17:02:04 crc kubenswrapper[4869]: E1001 17:02:04.531785 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="650a55d6-dff7-4ed7-a5bb-0b5b59a003da" containerName="registry-server" Oct 01 17:02:04 crc kubenswrapper[4869]: I1001 17:02:04.531794 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="650a55d6-dff7-4ed7-a5bb-0b5b59a003da" containerName="registry-server" Oct 01 17:02:04 crc kubenswrapper[4869]: E1001 17:02:04.531824 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="650a55d6-dff7-4ed7-a5bb-0b5b59a003da" containerName="extract-content" Oct 01 17:02:04 crc kubenswrapper[4869]: I1001 17:02:04.531832 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="650a55d6-dff7-4ed7-a5bb-0b5b59a003da" containerName="extract-content" Oct 01 17:02:04 crc kubenswrapper[4869]: I1001 17:02:04.532124 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="650a55d6-dff7-4ed7-a5bb-0b5b59a003da" containerName="registry-server" Oct 01 17:02:04 crc kubenswrapper[4869]: I1001 17:02:04.533819 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-hc2qh" Oct 01 17:02:04 crc kubenswrapper[4869]: I1001 17:02:04.535159 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-hc2qh"] Oct 01 17:02:04 crc kubenswrapper[4869]: I1001 17:02:04.656382 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xplr8\" (UniqueName: \"kubernetes.io/projected/a6460e76-484b-4edf-8dbd-b095bf6a72e0-kube-api-access-xplr8\") pod \"certified-operators-hc2qh\" (UID: \"a6460e76-484b-4edf-8dbd-b095bf6a72e0\") " pod="openshift-marketplace/certified-operators-hc2qh" Oct 01 17:02:04 crc kubenswrapper[4869]: I1001 17:02:04.656498 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a6460e76-484b-4edf-8dbd-b095bf6a72e0-catalog-content\") pod \"certified-operators-hc2qh\" (UID: \"a6460e76-484b-4edf-8dbd-b095bf6a72e0\") " pod="openshift-marketplace/certified-operators-hc2qh" Oct 01 17:02:04 crc kubenswrapper[4869]: I1001 17:02:04.656592 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a6460e76-484b-4edf-8dbd-b095bf6a72e0-utilities\") pod \"certified-operators-hc2qh\" (UID: \"a6460e76-484b-4edf-8dbd-b095bf6a72e0\") " pod="openshift-marketplace/certified-operators-hc2qh" Oct 01 17:02:04 crc kubenswrapper[4869]: I1001 17:02:04.758826 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xplr8\" (UniqueName: \"kubernetes.io/projected/a6460e76-484b-4edf-8dbd-b095bf6a72e0-kube-api-access-xplr8\") pod \"certified-operators-hc2qh\" (UID: \"a6460e76-484b-4edf-8dbd-b095bf6a72e0\") " pod="openshift-marketplace/certified-operators-hc2qh" Oct 01 17:02:04 crc kubenswrapper[4869]: I1001 17:02:04.760242 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a6460e76-484b-4edf-8dbd-b095bf6a72e0-catalog-content\") pod \"certified-operators-hc2qh\" (UID: \"a6460e76-484b-4edf-8dbd-b095bf6a72e0\") " pod="openshift-marketplace/certified-operators-hc2qh" Oct 01 17:02:04 crc kubenswrapper[4869]: I1001 17:02:04.760588 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a6460e76-484b-4edf-8dbd-b095bf6a72e0-utilities\") pod \"certified-operators-hc2qh\" (UID: \"a6460e76-484b-4edf-8dbd-b095bf6a72e0\") " pod="openshift-marketplace/certified-operators-hc2qh" Oct 01 17:02:04 crc kubenswrapper[4869]: I1001 17:02:04.760756 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a6460e76-484b-4edf-8dbd-b095bf6a72e0-catalog-content\") pod \"certified-operators-hc2qh\" (UID: \"a6460e76-484b-4edf-8dbd-b095bf6a72e0\") " pod="openshift-marketplace/certified-operators-hc2qh" Oct 01 17:02:04 crc kubenswrapper[4869]: I1001 17:02:04.760934 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a6460e76-484b-4edf-8dbd-b095bf6a72e0-utilities\") pod \"certified-operators-hc2qh\" (UID: \"a6460e76-484b-4edf-8dbd-b095bf6a72e0\") " pod="openshift-marketplace/certified-operators-hc2qh" Oct 01 17:02:04 crc kubenswrapper[4869]: I1001 17:02:04.784362 4869 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-xplr8\" (UniqueName: \"kubernetes.io/projected/a6460e76-484b-4edf-8dbd-b095bf6a72e0-kube-api-access-xplr8\") pod \"certified-operators-hc2qh\" (UID: \"a6460e76-484b-4edf-8dbd-b095bf6a72e0\") " pod="openshift-marketplace/certified-operators-hc2qh" Oct 01 17:02:04 crc kubenswrapper[4869]: I1001 17:02:04.859043 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-hc2qh" Oct 01 17:02:05 crc kubenswrapper[4869]: I1001 17:02:05.328841 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-hc2qh"] Oct 01 17:02:05 crc kubenswrapper[4869]: I1001 17:02:05.732583 4869 generic.go:334] "Generic (PLEG): container finished" podID="a6460e76-484b-4edf-8dbd-b095bf6a72e0" containerID="55503c26ef64c78a9627f3c996739dc80b4c16ffa7470cf06216d30da14dee5f" exitCode=0 Oct 01 17:02:05 crc kubenswrapper[4869]: I1001 17:02:05.732628 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hc2qh" event={"ID":"a6460e76-484b-4edf-8dbd-b095bf6a72e0","Type":"ContainerDied","Data":"55503c26ef64c78a9627f3c996739dc80b4c16ffa7470cf06216d30da14dee5f"} Oct 01 17:02:05 crc kubenswrapper[4869]: I1001 17:02:05.732661 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hc2qh" event={"ID":"a6460e76-484b-4edf-8dbd-b095bf6a72e0","Type":"ContainerStarted","Data":"e28ecdc783a37146726a94f3fd7c5c3e94a6ef253d84e44a1ec19e2bae49e652"} Oct 01 17:02:06 crc kubenswrapper[4869]: I1001 17:02:06.744463 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hc2qh" event={"ID":"a6460e76-484b-4edf-8dbd-b095bf6a72e0","Type":"ContainerStarted","Data":"5d48970eb12ca4c2e8679fe46095cfff4eb0935d66052d938774d4c1f50868a3"} Oct 01 17:02:07 crc kubenswrapper[4869]: I1001 17:02:07.761051 4869 generic.go:334] "Generic (PLEG): container finished" podID="a6460e76-484b-4edf-8dbd-b095bf6a72e0" containerID="5d48970eb12ca4c2e8679fe46095cfff4eb0935d66052d938774d4c1f50868a3" exitCode=0 Oct 01 17:02:07 crc kubenswrapper[4869]: I1001 17:02:07.761116 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hc2qh" event={"ID":"a6460e76-484b-4edf-8dbd-b095bf6a72e0","Type":"ContainerDied","Data":"5d48970eb12ca4c2e8679fe46095cfff4eb0935d66052d938774d4c1f50868a3"} Oct 01 17:02:08 crc kubenswrapper[4869]: I1001 17:02:08.771425 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hc2qh" event={"ID":"a6460e76-484b-4edf-8dbd-b095bf6a72e0","Type":"ContainerStarted","Data":"e93c0b30111097d70c3168af728a76e31527895f5c2526dd85380c033df92fac"} Oct 01 17:02:09 crc kubenswrapper[4869]: I1001 17:02:09.581631 4869 scope.go:117] "RemoveContainer" containerID="13e59c672543e057ea5825aec1ce436345edb91e96c5ed564629c2560312008b" Oct 01 17:02:09 crc kubenswrapper[4869]: E1001 17:02:09.582010 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 17:02:14 crc kubenswrapper[4869]: I1001 17:02:14.859993 4869 kubelet.go:2542] 
"SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-hc2qh" Oct 01 17:02:14 crc kubenswrapper[4869]: I1001 17:02:14.860887 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-hc2qh" Oct 01 17:02:14 crc kubenswrapper[4869]: I1001 17:02:14.929655 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-hc2qh" Oct 01 17:02:14 crc kubenswrapper[4869]: I1001 17:02:14.959068 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-hc2qh" podStartSLOduration=8.406323681 podStartE2EDuration="10.959043076s" podCreationTimestamp="2025-10-01 17:02:04 +0000 UTC" firstStartedPulling="2025-10-01 17:02:05.734926347 +0000 UTC m=+7034.881769463" lastFinishedPulling="2025-10-01 17:02:08.287645732 +0000 UTC m=+7037.434488858" observedRunningTime="2025-10-01 17:02:08.793487614 +0000 UTC m=+7037.940330740" watchObservedRunningTime="2025-10-01 17:02:14.959043076 +0000 UTC m=+7044.105886202" Oct 01 17:02:15 crc kubenswrapper[4869]: I1001 17:02:15.890435 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-hc2qh" Oct 01 17:02:15 crc kubenswrapper[4869]: I1001 17:02:15.950222 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-hc2qh"] Oct 01 17:02:17 crc kubenswrapper[4869]: I1001 17:02:17.860162 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-hc2qh" podUID="a6460e76-484b-4edf-8dbd-b095bf6a72e0" containerName="registry-server" containerID="cri-o://e93c0b30111097d70c3168af728a76e31527895f5c2526dd85380c033df92fac" gracePeriod=2 Oct 01 17:02:18 crc kubenswrapper[4869]: I1001 17:02:18.879382 4869 generic.go:334] "Generic (PLEG): container finished" podID="a6460e76-484b-4edf-8dbd-b095bf6a72e0" containerID="e93c0b30111097d70c3168af728a76e31527895f5c2526dd85380c033df92fac" exitCode=0 Oct 01 17:02:18 crc kubenswrapper[4869]: I1001 17:02:18.879451 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hc2qh" event={"ID":"a6460e76-484b-4edf-8dbd-b095bf6a72e0","Type":"ContainerDied","Data":"e93c0b30111097d70c3168af728a76e31527895f5c2526dd85380c033df92fac"} Oct 01 17:02:19 crc kubenswrapper[4869]: I1001 17:02:19.342642 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-hc2qh" Oct 01 17:02:19 crc kubenswrapper[4869]: I1001 17:02:19.495568 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a6460e76-484b-4edf-8dbd-b095bf6a72e0-utilities\") pod \"a6460e76-484b-4edf-8dbd-b095bf6a72e0\" (UID: \"a6460e76-484b-4edf-8dbd-b095bf6a72e0\") " Oct 01 17:02:19 crc kubenswrapper[4869]: I1001 17:02:19.495745 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a6460e76-484b-4edf-8dbd-b095bf6a72e0-catalog-content\") pod \"a6460e76-484b-4edf-8dbd-b095bf6a72e0\" (UID: \"a6460e76-484b-4edf-8dbd-b095bf6a72e0\") " Oct 01 17:02:19 crc kubenswrapper[4869]: I1001 17:02:19.495800 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xplr8\" (UniqueName: \"kubernetes.io/projected/a6460e76-484b-4edf-8dbd-b095bf6a72e0-kube-api-access-xplr8\") pod \"a6460e76-484b-4edf-8dbd-b095bf6a72e0\" (UID: \"a6460e76-484b-4edf-8dbd-b095bf6a72e0\") " Oct 01 17:02:19 crc kubenswrapper[4869]: I1001 17:02:19.496933 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a6460e76-484b-4edf-8dbd-b095bf6a72e0-utilities" (OuterVolumeSpecName: "utilities") pod "a6460e76-484b-4edf-8dbd-b095bf6a72e0" (UID: "a6460e76-484b-4edf-8dbd-b095bf6a72e0"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 17:02:19 crc kubenswrapper[4869]: I1001 17:02:19.505307 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a6460e76-484b-4edf-8dbd-b095bf6a72e0-kube-api-access-xplr8" (OuterVolumeSpecName: "kube-api-access-xplr8") pod "a6460e76-484b-4edf-8dbd-b095bf6a72e0" (UID: "a6460e76-484b-4edf-8dbd-b095bf6a72e0"). InnerVolumeSpecName "kube-api-access-xplr8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 17:02:19 crc kubenswrapper[4869]: I1001 17:02:19.573299 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a6460e76-484b-4edf-8dbd-b095bf6a72e0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a6460e76-484b-4edf-8dbd-b095bf6a72e0" (UID: "a6460e76-484b-4edf-8dbd-b095bf6a72e0"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 17:02:19 crc kubenswrapper[4869]: I1001 17:02:19.598065 4869 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a6460e76-484b-4edf-8dbd-b095bf6a72e0-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 17:02:19 crc kubenswrapper[4869]: I1001 17:02:19.598099 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xplr8\" (UniqueName: \"kubernetes.io/projected/a6460e76-484b-4edf-8dbd-b095bf6a72e0-kube-api-access-xplr8\") on node \"crc\" DevicePath \"\"" Oct 01 17:02:19 crc kubenswrapper[4869]: I1001 17:02:19.598118 4869 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a6460e76-484b-4edf-8dbd-b095bf6a72e0-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 17:02:19 crc kubenswrapper[4869]: I1001 17:02:19.902211 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hc2qh" event={"ID":"a6460e76-484b-4edf-8dbd-b095bf6a72e0","Type":"ContainerDied","Data":"e28ecdc783a37146726a94f3fd7c5c3e94a6ef253d84e44a1ec19e2bae49e652"} Oct 01 17:02:19 crc kubenswrapper[4869]: I1001 17:02:19.902408 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-hc2qh" Oct 01 17:02:19 crc kubenswrapper[4869]: I1001 17:02:19.902854 4869 scope.go:117] "RemoveContainer" containerID="e93c0b30111097d70c3168af728a76e31527895f5c2526dd85380c033df92fac" Oct 01 17:02:19 crc kubenswrapper[4869]: I1001 17:02:19.941691 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-hc2qh"] Oct 01 17:02:19 crc kubenswrapper[4869]: I1001 17:02:19.950003 4869 scope.go:117] "RemoveContainer" containerID="5d48970eb12ca4c2e8679fe46095cfff4eb0935d66052d938774d4c1f50868a3" Oct 01 17:02:19 crc kubenswrapper[4869]: I1001 17:02:19.952155 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-hc2qh"] Oct 01 17:02:19 crc kubenswrapper[4869]: I1001 17:02:19.993913 4869 scope.go:117] "RemoveContainer" containerID="55503c26ef64c78a9627f3c996739dc80b4c16ffa7470cf06216d30da14dee5f" Oct 01 17:02:21 crc kubenswrapper[4869]: I1001 17:02:21.594557 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a6460e76-484b-4edf-8dbd-b095bf6a72e0" path="/var/lib/kubelet/pods/a6460e76-484b-4edf-8dbd-b095bf6a72e0/volumes" Oct 01 17:02:23 crc kubenswrapper[4869]: I1001 17:02:23.582113 4869 scope.go:117] "RemoveContainer" containerID="13e59c672543e057ea5825aec1ce436345edb91e96c5ed564629c2560312008b" Oct 01 17:02:23 crc kubenswrapper[4869]: E1001 17:02:23.582889 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 17:02:37 crc kubenswrapper[4869]: I1001 17:02:37.581114 4869 scope.go:117] "RemoveContainer" containerID="13e59c672543e057ea5825aec1ce436345edb91e96c5ed564629c2560312008b" Oct 01 17:02:37 crc kubenswrapper[4869]: E1001 17:02:37.581988 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" 
with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 17:02:48 crc kubenswrapper[4869]: I1001 17:02:48.581406 4869 scope.go:117] "RemoveContainer" containerID="13e59c672543e057ea5825aec1ce436345edb91e96c5ed564629c2560312008b" Oct 01 17:02:48 crc kubenswrapper[4869]: E1001 17:02:48.582467 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 17:02:59 crc kubenswrapper[4869]: I1001 17:02:59.581766 4869 scope.go:117] "RemoveContainer" containerID="13e59c672543e057ea5825aec1ce436345edb91e96c5ed564629c2560312008b" Oct 01 17:02:59 crc kubenswrapper[4869]: E1001 17:02:59.582895 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 17:03:11 crc kubenswrapper[4869]: I1001 17:03:11.593303 4869 scope.go:117] "RemoveContainer" containerID="13e59c672543e057ea5825aec1ce436345edb91e96c5ed564629c2560312008b" Oct 01 17:03:11 crc kubenswrapper[4869]: E1001 17:03:11.594548 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 17:03:26 crc kubenswrapper[4869]: I1001 17:03:26.581780 4869 scope.go:117] "RemoveContainer" containerID="13e59c672543e057ea5825aec1ce436345edb91e96c5ed564629c2560312008b" Oct 01 17:03:26 crc kubenswrapper[4869]: E1001 17:03:26.583014 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 17:03:41 crc kubenswrapper[4869]: I1001 17:03:41.598315 4869 scope.go:117] "RemoveContainer" containerID="13e59c672543e057ea5825aec1ce436345edb91e96c5ed564629c2560312008b" Oct 01 17:03:41 crc kubenswrapper[4869]: E1001 17:03:41.599456 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 17:03:53 crc kubenswrapper[4869]: I1001 17:03:53.584433 4869 scope.go:117] "RemoveContainer" containerID="13e59c672543e057ea5825aec1ce436345edb91e96c5ed564629c2560312008b" Oct 01 17:03:53 crc kubenswrapper[4869]: E1001 17:03:53.585767 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 17:04:06 crc kubenswrapper[4869]: I1001 17:04:06.581797 4869 scope.go:117] "RemoveContainer" containerID="13e59c672543e057ea5825aec1ce436345edb91e96c5ed564629c2560312008b" Oct 01 17:04:06 crc kubenswrapper[4869]: E1001 17:04:06.582846 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 17:04:18 crc kubenswrapper[4869]: I1001 17:04:18.581937 4869 scope.go:117] "RemoveContainer" containerID="13e59c672543e057ea5825aec1ce436345edb91e96c5ed564629c2560312008b" Oct 01 17:04:18 crc kubenswrapper[4869]: E1001 17:04:18.583670 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 17:04:29 crc kubenswrapper[4869]: I1001 17:04:29.581371 4869 scope.go:117] "RemoveContainer" containerID="13e59c672543e057ea5825aec1ce436345edb91e96c5ed564629c2560312008b" Oct 01 17:04:29 crc kubenswrapper[4869]: E1001 17:04:29.582635 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 17:04:40 crc kubenswrapper[4869]: I1001 17:04:40.581677 4869 scope.go:117] "RemoveContainer" containerID="13e59c672543e057ea5825aec1ce436345edb91e96c5ed564629c2560312008b" Oct 01 17:04:40 crc kubenswrapper[4869]: E1001 17:04:40.582485 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" 
podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 17:04:53 crc kubenswrapper[4869]: I1001 17:04:53.581197 4869 scope.go:117] "RemoveContainer" containerID="13e59c672543e057ea5825aec1ce436345edb91e96c5ed564629c2560312008b" Oct 01 17:04:53 crc kubenswrapper[4869]: E1001 17:04:53.582314 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 17:05:05 crc kubenswrapper[4869]: I1001 17:05:05.581774 4869 scope.go:117] "RemoveContainer" containerID="13e59c672543e057ea5825aec1ce436345edb91e96c5ed564629c2560312008b" Oct 01 17:05:05 crc kubenswrapper[4869]: E1001 17:05:05.582370 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 17:05:17 crc kubenswrapper[4869]: I1001 17:05:17.581708 4869 scope.go:117] "RemoveContainer" containerID="13e59c672543e057ea5825aec1ce436345edb91e96c5ed564629c2560312008b" Oct 01 17:05:17 crc kubenswrapper[4869]: E1001 17:05:17.582407 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 17:05:29 crc kubenswrapper[4869]: I1001 17:05:29.581342 4869 scope.go:117] "RemoveContainer" containerID="13e59c672543e057ea5825aec1ce436345edb91e96c5ed564629c2560312008b" Oct 01 17:05:29 crc kubenswrapper[4869]: E1001 17:05:29.582404 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 17:05:42 crc kubenswrapper[4869]: I1001 17:05:42.581955 4869 scope.go:117] "RemoveContainer" containerID="13e59c672543e057ea5825aec1ce436345edb91e96c5ed564629c2560312008b" Oct 01 17:05:42 crc kubenswrapper[4869]: E1001 17:05:42.583394 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 17:05:56 crc kubenswrapper[4869]: I1001 17:05:56.581832 4869 scope.go:117] "RemoveContainer" 
containerID="13e59c672543e057ea5825aec1ce436345edb91e96c5ed564629c2560312008b" Oct 01 17:05:56 crc kubenswrapper[4869]: E1001 17:05:56.583134 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 17:06:11 crc kubenswrapper[4869]: I1001 17:06:11.580932 4869 scope.go:117] "RemoveContainer" containerID="13e59c672543e057ea5825aec1ce436345edb91e96c5ed564629c2560312008b" Oct 01 17:06:11 crc kubenswrapper[4869]: E1001 17:06:11.581629 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 17:06:24 crc kubenswrapper[4869]: I1001 17:06:24.582206 4869 scope.go:117] "RemoveContainer" containerID="13e59c672543e057ea5825aec1ce436345edb91e96c5ed564629c2560312008b" Oct 01 17:06:25 crc kubenswrapper[4869]: I1001 17:06:25.627702 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" event={"ID":"a4b64f7f-0b03-4f47-965b-9fde048b735c","Type":"ContainerStarted","Data":"63f8525468ce6f604876dfd165c9ad9323212e1d06ef08033a784d7275c08f61"} Oct 01 17:06:32 crc kubenswrapper[4869]: I1001 17:06:32.703063 4869 generic.go:334] "Generic (PLEG): container finished" podID="26dfe6ee-15f8-424e-b16a-58a57d5bc4f8" containerID="5f85bf629024a8ef122fc1e9369d39c02c290b936c3f52763ba7c028811f55a2" exitCode=0 Oct 01 17:06:32 crc kubenswrapper[4869]: I1001 17:06:32.703172 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest-s01-single-test" event={"ID":"26dfe6ee-15f8-424e-b16a-58a57d5bc4f8","Type":"ContainerDied","Data":"5f85bf629024a8ef122fc1e9369d39c02c290b936c3f52763ba7c028811f55a2"} Oct 01 17:06:34 crc kubenswrapper[4869]: I1001 17:06:34.269505 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/tempest-tests-tempest-s01-single-test" Oct 01 17:06:34 crc kubenswrapper[4869]: I1001 17:06:34.297034 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xtmq6\" (UniqueName: \"kubernetes.io/projected/26dfe6ee-15f8-424e-b16a-58a57d5bc4f8-kube-api-access-xtmq6\") pod \"26dfe6ee-15f8-424e-b16a-58a57d5bc4f8\" (UID: \"26dfe6ee-15f8-424e-b16a-58a57d5bc4f8\") " Oct 01 17:06:34 crc kubenswrapper[4869]: I1001 17:06:34.297108 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/26dfe6ee-15f8-424e-b16a-58a57d5bc4f8-test-operator-ephemeral-workdir\") pod \"26dfe6ee-15f8-424e-b16a-58a57d5bc4f8\" (UID: \"26dfe6ee-15f8-424e-b16a-58a57d5bc4f8\") " Oct 01 17:06:34 crc kubenswrapper[4869]: I1001 17:06:34.297161 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/26dfe6ee-15f8-424e-b16a-58a57d5bc4f8-openstack-config\") pod \"26dfe6ee-15f8-424e-b16a-58a57d5bc4f8\" (UID: \"26dfe6ee-15f8-424e-b16a-58a57d5bc4f8\") " Oct 01 17:06:34 crc kubenswrapper[4869]: I1001 17:06:34.297206 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/26dfe6ee-15f8-424e-b16a-58a57d5bc4f8-test-operator-ephemeral-temporary\") pod \"26dfe6ee-15f8-424e-b16a-58a57d5bc4f8\" (UID: \"26dfe6ee-15f8-424e-b16a-58a57d5bc4f8\") " Oct 01 17:06:34 crc kubenswrapper[4869]: I1001 17:06:34.297455 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/26dfe6ee-15f8-424e-b16a-58a57d5bc4f8-config-data\") pod \"26dfe6ee-15f8-424e-b16a-58a57d5bc4f8\" (UID: \"26dfe6ee-15f8-424e-b16a-58a57d5bc4f8\") " Oct 01 17:06:34 crc kubenswrapper[4869]: I1001 17:06:34.297503 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-logs\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"26dfe6ee-15f8-424e-b16a-58a57d5bc4f8\" (UID: \"26dfe6ee-15f8-424e-b16a-58a57d5bc4f8\") " Oct 01 17:06:34 crc kubenswrapper[4869]: I1001 17:06:34.297665 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/26dfe6ee-15f8-424e-b16a-58a57d5bc4f8-openstack-config-secret\") pod \"26dfe6ee-15f8-424e-b16a-58a57d5bc4f8\" (UID: \"26dfe6ee-15f8-424e-b16a-58a57d5bc4f8\") " Oct 01 17:06:34 crc kubenswrapper[4869]: I1001 17:06:34.297709 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/26dfe6ee-15f8-424e-b16a-58a57d5bc4f8-ceph\") pod \"26dfe6ee-15f8-424e-b16a-58a57d5bc4f8\" (UID: \"26dfe6ee-15f8-424e-b16a-58a57d5bc4f8\") " Oct 01 17:06:34 crc kubenswrapper[4869]: I1001 17:06:34.297770 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/26dfe6ee-15f8-424e-b16a-58a57d5bc4f8-ca-certs\") pod \"26dfe6ee-15f8-424e-b16a-58a57d5bc4f8\" (UID: \"26dfe6ee-15f8-424e-b16a-58a57d5bc4f8\") " Oct 01 17:06:34 crc kubenswrapper[4869]: I1001 17:06:34.297792 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/26dfe6ee-15f8-424e-b16a-58a57d5bc4f8-ssh-key\") pod 
\"26dfe6ee-15f8-424e-b16a-58a57d5bc4f8\" (UID: \"26dfe6ee-15f8-424e-b16a-58a57d5bc4f8\") " Oct 01 17:06:34 crc kubenswrapper[4869]: I1001 17:06:34.298829 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/26dfe6ee-15f8-424e-b16a-58a57d5bc4f8-test-operator-ephemeral-temporary" (OuterVolumeSpecName: "test-operator-ephemeral-temporary") pod "26dfe6ee-15f8-424e-b16a-58a57d5bc4f8" (UID: "26dfe6ee-15f8-424e-b16a-58a57d5bc4f8"). InnerVolumeSpecName "test-operator-ephemeral-temporary". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 17:06:34 crc kubenswrapper[4869]: I1001 17:06:34.299569 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/26dfe6ee-15f8-424e-b16a-58a57d5bc4f8-config-data" (OuterVolumeSpecName: "config-data") pod "26dfe6ee-15f8-424e-b16a-58a57d5bc4f8" (UID: "26dfe6ee-15f8-424e-b16a-58a57d5bc4f8"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 17:06:34 crc kubenswrapper[4869]: I1001 17:06:34.299754 4869 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/26dfe6ee-15f8-424e-b16a-58a57d5bc4f8-test-operator-ephemeral-temporary\") on node \"crc\" DevicePath \"\"" Oct 01 17:06:34 crc kubenswrapper[4869]: I1001 17:06:34.307142 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/26dfe6ee-15f8-424e-b16a-58a57d5bc4f8-test-operator-ephemeral-workdir" (OuterVolumeSpecName: "test-operator-ephemeral-workdir") pod "26dfe6ee-15f8-424e-b16a-58a57d5bc4f8" (UID: "26dfe6ee-15f8-424e-b16a-58a57d5bc4f8"). InnerVolumeSpecName "test-operator-ephemeral-workdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 17:06:34 crc kubenswrapper[4869]: I1001 17:06:34.309636 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/26dfe6ee-15f8-424e-b16a-58a57d5bc4f8-kube-api-access-xtmq6" (OuterVolumeSpecName: "kube-api-access-xtmq6") pod "26dfe6ee-15f8-424e-b16a-58a57d5bc4f8" (UID: "26dfe6ee-15f8-424e-b16a-58a57d5bc4f8"). InnerVolumeSpecName "kube-api-access-xtmq6". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 17:06:34 crc kubenswrapper[4869]: I1001 17:06:34.320179 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage08-crc" (OuterVolumeSpecName: "test-operator-logs") pod "26dfe6ee-15f8-424e-b16a-58a57d5bc4f8" (UID: "26dfe6ee-15f8-424e-b16a-58a57d5bc4f8"). InnerVolumeSpecName "local-storage08-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 01 17:06:34 crc kubenswrapper[4869]: I1001 17:06:34.328718 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/26dfe6ee-15f8-424e-b16a-58a57d5bc4f8-ceph" (OuterVolumeSpecName: "ceph") pod "26dfe6ee-15f8-424e-b16a-58a57d5bc4f8" (UID: "26dfe6ee-15f8-424e-b16a-58a57d5bc4f8"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 17:06:34 crc kubenswrapper[4869]: I1001 17:06:34.350165 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/26dfe6ee-15f8-424e-b16a-58a57d5bc4f8-ca-certs" (OuterVolumeSpecName: "ca-certs") pod "26dfe6ee-15f8-424e-b16a-58a57d5bc4f8" (UID: "26dfe6ee-15f8-424e-b16a-58a57d5bc4f8"). InnerVolumeSpecName "ca-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 17:06:34 crc kubenswrapper[4869]: I1001 17:06:34.352502 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/26dfe6ee-15f8-424e-b16a-58a57d5bc4f8-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "26dfe6ee-15f8-424e-b16a-58a57d5bc4f8" (UID: "26dfe6ee-15f8-424e-b16a-58a57d5bc4f8"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 17:06:34 crc kubenswrapper[4869]: I1001 17:06:34.364535 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/26dfe6ee-15f8-424e-b16a-58a57d5bc4f8-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "26dfe6ee-15f8-424e-b16a-58a57d5bc4f8" (UID: "26dfe6ee-15f8-424e-b16a-58a57d5bc4f8"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 17:06:34 crc kubenswrapper[4869]: I1001 17:06:34.384158 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/26dfe6ee-15f8-424e-b16a-58a57d5bc4f8-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "26dfe6ee-15f8-424e-b16a-58a57d5bc4f8" (UID: "26dfe6ee-15f8-424e-b16a-58a57d5bc4f8"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 17:06:34 crc kubenswrapper[4869]: I1001 17:06:34.401306 4869 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/26dfe6ee-15f8-424e-b16a-58a57d5bc4f8-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 17:06:34 crc kubenswrapper[4869]: I1001 17:06:34.401358 4869 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" " Oct 01 17:06:34 crc kubenswrapper[4869]: I1001 17:06:34.401369 4869 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/26dfe6ee-15f8-424e-b16a-58a57d5bc4f8-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Oct 01 17:06:34 crc kubenswrapper[4869]: I1001 17:06:34.401380 4869 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/26dfe6ee-15f8-424e-b16a-58a57d5bc4f8-ceph\") on node \"crc\" DevicePath \"\"" Oct 01 17:06:34 crc kubenswrapper[4869]: I1001 17:06:34.401388 4869 reconciler_common.go:293] "Volume detached for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/26dfe6ee-15f8-424e-b16a-58a57d5bc4f8-ca-certs\") on node \"crc\" DevicePath \"\"" Oct 01 17:06:34 crc kubenswrapper[4869]: I1001 17:06:34.401396 4869 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/26dfe6ee-15f8-424e-b16a-58a57d5bc4f8-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 01 17:06:34 crc kubenswrapper[4869]: I1001 17:06:34.401405 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xtmq6\" (UniqueName: \"kubernetes.io/projected/26dfe6ee-15f8-424e-b16a-58a57d5bc4f8-kube-api-access-xtmq6\") on node \"crc\" DevicePath \"\"" Oct 01 17:06:34 crc kubenswrapper[4869]: I1001 17:06:34.401414 4869 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/26dfe6ee-15f8-424e-b16a-58a57d5bc4f8-test-operator-ephemeral-workdir\") on node \"crc\" DevicePath \"\"" Oct 01 17:06:34 crc kubenswrapper[4869]: 
I1001 17:06:34.401422 4869 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/26dfe6ee-15f8-424e-b16a-58a57d5bc4f8-openstack-config\") on node \"crc\" DevicePath \"\"" Oct 01 17:06:34 crc kubenswrapper[4869]: I1001 17:06:34.422803 4869 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage08-crc" (UniqueName: "kubernetes.io/local-volume/local-storage08-crc") on node "crc" Oct 01 17:06:34 crc kubenswrapper[4869]: I1001 17:06:34.502999 4869 reconciler_common.go:293] "Volume detached for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" DevicePath \"\"" Oct 01 17:06:34 crc kubenswrapper[4869]: I1001 17:06:34.727730 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest-s01-single-test" event={"ID":"26dfe6ee-15f8-424e-b16a-58a57d5bc4f8","Type":"ContainerDied","Data":"efb7598238f207a387b1aa777204526ce69ed87496891675e347e18a0df3c7a4"} Oct 01 17:06:34 crc kubenswrapper[4869]: I1001 17:06:34.727817 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="efb7598238f207a387b1aa777204526ce69ed87496891675e347e18a0df3c7a4" Oct 01 17:06:34 crc kubenswrapper[4869]: I1001 17:06:34.727938 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest-s01-single-test" Oct 01 17:06:40 crc kubenswrapper[4869]: I1001 17:06:40.616446 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Oct 01 17:06:40 crc kubenswrapper[4869]: E1001 17:06:40.617467 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6460e76-484b-4edf-8dbd-b095bf6a72e0" containerName="extract-content" Oct 01 17:06:40 crc kubenswrapper[4869]: I1001 17:06:40.617483 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="a6460e76-484b-4edf-8dbd-b095bf6a72e0" containerName="extract-content" Oct 01 17:06:40 crc kubenswrapper[4869]: E1001 17:06:40.617503 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6460e76-484b-4edf-8dbd-b095bf6a72e0" containerName="extract-utilities" Oct 01 17:06:40 crc kubenswrapper[4869]: I1001 17:06:40.617510 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="a6460e76-484b-4edf-8dbd-b095bf6a72e0" containerName="extract-utilities" Oct 01 17:06:40 crc kubenswrapper[4869]: E1001 17:06:40.617537 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6460e76-484b-4edf-8dbd-b095bf6a72e0" containerName="registry-server" Oct 01 17:06:40 crc kubenswrapper[4869]: I1001 17:06:40.617544 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="a6460e76-484b-4edf-8dbd-b095bf6a72e0" containerName="registry-server" Oct 01 17:06:40 crc kubenswrapper[4869]: E1001 17:06:40.617559 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="26dfe6ee-15f8-424e-b16a-58a57d5bc4f8" containerName="tempest-tests-tempest-tests-runner" Oct 01 17:06:40 crc kubenswrapper[4869]: I1001 17:06:40.617565 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="26dfe6ee-15f8-424e-b16a-58a57d5bc4f8" containerName="tempest-tests-tempest-tests-runner" Oct 01 17:06:40 crc kubenswrapper[4869]: I1001 17:06:40.617740 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="26dfe6ee-15f8-424e-b16a-58a57d5bc4f8" containerName="tempest-tests-tempest-tests-runner" Oct 01 17:06:40 crc kubenswrapper[4869]: I1001 17:06:40.617757 4869 
memory_manager.go:354] "RemoveStaleState removing state" podUID="a6460e76-484b-4edf-8dbd-b095bf6a72e0" containerName="registry-server" Oct 01 17:06:40 crc kubenswrapper[4869]: I1001 17:06:40.618453 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 01 17:06:40 crc kubenswrapper[4869]: I1001 17:06:40.620072 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-rnrsn" Oct 01 17:06:40 crc kubenswrapper[4869]: I1001 17:06:40.631666 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Oct 01 17:06:40 crc kubenswrapper[4869]: I1001 17:06:40.737670 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9fj8f\" (UniqueName: \"kubernetes.io/projected/105ad49f-b7e5-40d7-a021-7eef559250ea-kube-api-access-9fj8f\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"105ad49f-b7e5-40d7-a021-7eef559250ea\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 01 17:06:40 crc kubenswrapper[4869]: I1001 17:06:40.737932 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"105ad49f-b7e5-40d7-a021-7eef559250ea\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 01 17:06:40 crc kubenswrapper[4869]: I1001 17:06:40.840314 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"105ad49f-b7e5-40d7-a021-7eef559250ea\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 01 17:06:40 crc kubenswrapper[4869]: I1001 17:06:40.840416 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9fj8f\" (UniqueName: \"kubernetes.io/projected/105ad49f-b7e5-40d7-a021-7eef559250ea-kube-api-access-9fj8f\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"105ad49f-b7e5-40d7-a021-7eef559250ea\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 01 17:06:40 crc kubenswrapper[4869]: I1001 17:06:40.840829 4869 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"105ad49f-b7e5-40d7-a021-7eef559250ea\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 01 17:06:40 crc kubenswrapper[4869]: I1001 17:06:40.862219 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9fj8f\" (UniqueName: \"kubernetes.io/projected/105ad49f-b7e5-40d7-a021-7eef559250ea-kube-api-access-9fj8f\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"105ad49f-b7e5-40d7-a021-7eef559250ea\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 01 17:06:40 crc kubenswrapper[4869]: I1001 17:06:40.875840 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage08-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"105ad49f-b7e5-40d7-a021-7eef559250ea\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 01 17:06:40 crc kubenswrapper[4869]: I1001 17:06:40.951944 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 01 17:06:41 crc kubenswrapper[4869]: I1001 17:06:41.426718 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Oct 01 17:06:41 crc kubenswrapper[4869]: I1001 17:06:41.440575 4869 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 01 17:06:41 crc kubenswrapper[4869]: I1001 17:06:41.805514 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"105ad49f-b7e5-40d7-a021-7eef559250ea","Type":"ContainerStarted","Data":"65bc7595f07d1b1e56fcde6c4e2882b08edf19e64e3d857b3cdfc7f8081104ed"} Oct 01 17:06:42 crc kubenswrapper[4869]: I1001 17:06:42.819147 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"105ad49f-b7e5-40d7-a021-7eef559250ea","Type":"ContainerStarted","Data":"308f5afc790046ee58121f13d460224bd94d37e5350d4f26a40ec7d6d6f265c2"} Oct 01 17:06:42 crc kubenswrapper[4869]: I1001 17:06:42.836278 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" podStartSLOduration=1.83410724 podStartE2EDuration="2.836238822s" podCreationTimestamp="2025-10-01 17:06:40 +0000 UTC" firstStartedPulling="2025-10-01 17:06:41.440190493 +0000 UTC m=+7310.587033609" lastFinishedPulling="2025-10-01 17:06:42.442322085 +0000 UTC m=+7311.589165191" observedRunningTime="2025-10-01 17:06:42.831325808 +0000 UTC m=+7311.978168974" watchObservedRunningTime="2025-10-01 17:06:42.836238822 +0000 UTC m=+7311.983081958" Oct 01 17:07:05 crc kubenswrapper[4869]: I1001 17:07:05.279095 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/tobiko-tests-tobiko-s00-podified-functional"] Oct 01 17:07:05 crc kubenswrapper[4869]: I1001 17:07:05.282086 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/tobiko-tests-tobiko-s00-podified-functional" Oct 01 17:07:05 crc kubenswrapper[4869]: I1001 17:07:05.284774 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tobiko-tests-tobikotobiko-public-key" Oct 01 17:07:05 crc kubenswrapper[4869]: I1001 17:07:05.285404 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"tobiko-secret" Oct 01 17:07:05 crc kubenswrapper[4869]: I1001 17:07:05.285405 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tobiko-tests-tobikotobiko-config" Oct 01 17:07:05 crc kubenswrapper[4869]: I1001 17:07:05.287544 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"test-operator-clouds-config" Oct 01 17:07:05 crc kubenswrapper[4869]: I1001 17:07:05.287637 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tobiko-tests-tobikotobiko-private-key" Oct 01 17:07:05 crc kubenswrapper[4869]: I1001 17:07:05.289683 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tobiko-tests-tobiko-s00-podified-functional"] Oct 01 17:07:05 crc kubenswrapper[4869]: I1001 17:07:05.400373 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p4f4s\" (UniqueName: \"kubernetes.io/projected/a1a92aa0-ac91-4391-949a-fd2bcfa3e714-kube-api-access-p4f4s\") pod \"tobiko-tests-tobiko-s00-podified-functional\" (UID: \"a1a92aa0-ac91-4391-949a-fd2bcfa3e714\") " pod="openstack/tobiko-tests-tobiko-s00-podified-functional" Oct 01 17:07:05 crc kubenswrapper[4869]: I1001 17:07:05.400426 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/a1a92aa0-ac91-4391-949a-fd2bcfa3e714-ca-certs\") pod \"tobiko-tests-tobiko-s00-podified-functional\" (UID: \"a1a92aa0-ac91-4391-949a-fd2bcfa3e714\") " pod="openstack/tobiko-tests-tobiko-s00-podified-functional" Oct 01 17:07:05 crc kubenswrapper[4869]: I1001 17:07:05.400471 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-clouds-config\" (UniqueName: \"kubernetes.io/configmap/a1a92aa0-ac91-4391-949a-fd2bcfa3e714-test-operator-clouds-config\") pod \"tobiko-tests-tobiko-s00-podified-functional\" (UID: \"a1a92aa0-ac91-4391-949a-fd2bcfa3e714\") " pod="openstack/tobiko-tests-tobiko-s00-podified-functional" Oct 01 17:07:05 crc kubenswrapper[4869]: I1001 17:07:05.400549 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/a1a92aa0-ac91-4391-949a-fd2bcfa3e714-ceph\") pod \"tobiko-tests-tobiko-s00-podified-functional\" (UID: \"a1a92aa0-ac91-4391-949a-fd2bcfa3e714\") " pod="openstack/tobiko-tests-tobiko-s00-podified-functional" Oct 01 17:07:05 crc kubenswrapper[4869]: I1001 17:07:05.400614 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"tobiko-tests-tobiko-s00-podified-functional\" (UID: \"a1a92aa0-ac91-4391-949a-fd2bcfa3e714\") " pod="openstack/tobiko-tests-tobiko-s00-podified-functional" Oct 01 17:07:05 crc kubenswrapper[4869]: I1001 17:07:05.400657 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: 
\"kubernetes.io/empty-dir/a1a92aa0-ac91-4391-949a-fd2bcfa3e714-test-operator-ephemeral-temporary\") pod \"tobiko-tests-tobiko-s00-podified-functional\" (UID: \"a1a92aa0-ac91-4391-949a-fd2bcfa3e714\") " pod="openstack/tobiko-tests-tobiko-s00-podified-functional" Oct 01 17:07:05 crc kubenswrapper[4869]: I1001 17:07:05.400692 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tobiko-private-key\" (UniqueName: \"kubernetes.io/configmap/a1a92aa0-ac91-4391-949a-fd2bcfa3e714-tobiko-private-key\") pod \"tobiko-tests-tobiko-s00-podified-functional\" (UID: \"a1a92aa0-ac91-4391-949a-fd2bcfa3e714\") " pod="openstack/tobiko-tests-tobiko-s00-podified-functional" Oct 01 17:07:05 crc kubenswrapper[4869]: I1001 17:07:05.400722 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/a1a92aa0-ac91-4391-949a-fd2bcfa3e714-test-operator-ephemeral-workdir\") pod \"tobiko-tests-tobiko-s00-podified-functional\" (UID: \"a1a92aa0-ac91-4391-949a-fd2bcfa3e714\") " pod="openstack/tobiko-tests-tobiko-s00-podified-functional" Oct 01 17:07:05 crc kubenswrapper[4869]: I1001 17:07:05.401177 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/a1a92aa0-ac91-4391-949a-fd2bcfa3e714-openstack-config-secret\") pod \"tobiko-tests-tobiko-s00-podified-functional\" (UID: \"a1a92aa0-ac91-4391-949a-fd2bcfa3e714\") " pod="openstack/tobiko-tests-tobiko-s00-podified-functional" Oct 01 17:07:05 crc kubenswrapper[4869]: I1001 17:07:05.401252 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tobiko-public-key\" (UniqueName: \"kubernetes.io/configmap/a1a92aa0-ac91-4391-949a-fd2bcfa3e714-tobiko-public-key\") pod \"tobiko-tests-tobiko-s00-podified-functional\" (UID: \"a1a92aa0-ac91-4391-949a-fd2bcfa3e714\") " pod="openstack/tobiko-tests-tobiko-s00-podified-functional" Oct 01 17:07:05 crc kubenswrapper[4869]: I1001 17:07:05.401451 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tobiko-config\" (UniqueName: \"kubernetes.io/configmap/a1a92aa0-ac91-4391-949a-fd2bcfa3e714-tobiko-config\") pod \"tobiko-tests-tobiko-s00-podified-functional\" (UID: \"a1a92aa0-ac91-4391-949a-fd2bcfa3e714\") " pod="openstack/tobiko-tests-tobiko-s00-podified-functional" Oct 01 17:07:05 crc kubenswrapper[4869]: I1001 17:07:05.401549 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubeconfig\" (UniqueName: \"kubernetes.io/secret/a1a92aa0-ac91-4391-949a-fd2bcfa3e714-kubeconfig\") pod \"tobiko-tests-tobiko-s00-podified-functional\" (UID: \"a1a92aa0-ac91-4391-949a-fd2bcfa3e714\") " pod="openstack/tobiko-tests-tobiko-s00-podified-functional" Oct 01 17:07:05 crc kubenswrapper[4869]: I1001 17:07:05.503378 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/a1a92aa0-ac91-4391-949a-fd2bcfa3e714-openstack-config-secret\") pod \"tobiko-tests-tobiko-s00-podified-functional\" (UID: \"a1a92aa0-ac91-4391-949a-fd2bcfa3e714\") " pod="openstack/tobiko-tests-tobiko-s00-podified-functional" Oct 01 17:07:05 crc kubenswrapper[4869]: I1001 17:07:05.503471 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tobiko-public-key\" (UniqueName: 
\"kubernetes.io/configmap/a1a92aa0-ac91-4391-949a-fd2bcfa3e714-tobiko-public-key\") pod \"tobiko-tests-tobiko-s00-podified-functional\" (UID: \"a1a92aa0-ac91-4391-949a-fd2bcfa3e714\") " pod="openstack/tobiko-tests-tobiko-s00-podified-functional" Oct 01 17:07:05 crc kubenswrapper[4869]: I1001 17:07:05.503571 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tobiko-config\" (UniqueName: \"kubernetes.io/configmap/a1a92aa0-ac91-4391-949a-fd2bcfa3e714-tobiko-config\") pod \"tobiko-tests-tobiko-s00-podified-functional\" (UID: \"a1a92aa0-ac91-4391-949a-fd2bcfa3e714\") " pod="openstack/tobiko-tests-tobiko-s00-podified-functional" Oct 01 17:07:05 crc kubenswrapper[4869]: I1001 17:07:05.503703 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubeconfig\" (UniqueName: \"kubernetes.io/secret/a1a92aa0-ac91-4391-949a-fd2bcfa3e714-kubeconfig\") pod \"tobiko-tests-tobiko-s00-podified-functional\" (UID: \"a1a92aa0-ac91-4391-949a-fd2bcfa3e714\") " pod="openstack/tobiko-tests-tobiko-s00-podified-functional" Oct 01 17:07:05 crc kubenswrapper[4869]: I1001 17:07:05.504478 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p4f4s\" (UniqueName: \"kubernetes.io/projected/a1a92aa0-ac91-4391-949a-fd2bcfa3e714-kube-api-access-p4f4s\") pod \"tobiko-tests-tobiko-s00-podified-functional\" (UID: \"a1a92aa0-ac91-4391-949a-fd2bcfa3e714\") " pod="openstack/tobiko-tests-tobiko-s00-podified-functional" Oct 01 17:07:05 crc kubenswrapper[4869]: I1001 17:07:05.504786 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/a1a92aa0-ac91-4391-949a-fd2bcfa3e714-ca-certs\") pod \"tobiko-tests-tobiko-s00-podified-functional\" (UID: \"a1a92aa0-ac91-4391-949a-fd2bcfa3e714\") " pod="openstack/tobiko-tests-tobiko-s00-podified-functional" Oct 01 17:07:05 crc kubenswrapper[4869]: I1001 17:07:05.505062 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-clouds-config\" (UniqueName: \"kubernetes.io/configmap/a1a92aa0-ac91-4391-949a-fd2bcfa3e714-test-operator-clouds-config\") pod \"tobiko-tests-tobiko-s00-podified-functional\" (UID: \"a1a92aa0-ac91-4391-949a-fd2bcfa3e714\") " pod="openstack/tobiko-tests-tobiko-s00-podified-functional" Oct 01 17:07:05 crc kubenswrapper[4869]: I1001 17:07:05.505441 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/a1a92aa0-ac91-4391-949a-fd2bcfa3e714-ceph\") pod \"tobiko-tests-tobiko-s00-podified-functional\" (UID: \"a1a92aa0-ac91-4391-949a-fd2bcfa3e714\") " pod="openstack/tobiko-tests-tobiko-s00-podified-functional" Oct 01 17:07:05 crc kubenswrapper[4869]: I1001 17:07:05.505222 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tobiko-config\" (UniqueName: \"kubernetes.io/configmap/a1a92aa0-ac91-4391-949a-fd2bcfa3e714-tobiko-config\") pod \"tobiko-tests-tobiko-s00-podified-functional\" (UID: \"a1a92aa0-ac91-4391-949a-fd2bcfa3e714\") " pod="openstack/tobiko-tests-tobiko-s00-podified-functional" Oct 01 17:07:05 crc kubenswrapper[4869]: I1001 17:07:05.505164 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tobiko-public-key\" (UniqueName: \"kubernetes.io/configmap/a1a92aa0-ac91-4391-949a-fd2bcfa3e714-tobiko-public-key\") pod \"tobiko-tests-tobiko-s00-podified-functional\" (UID: \"a1a92aa0-ac91-4391-949a-fd2bcfa3e714\") " 
pod="openstack/tobiko-tests-tobiko-s00-podified-functional" Oct 01 17:07:05 crc kubenswrapper[4869]: I1001 17:07:05.505769 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"tobiko-tests-tobiko-s00-podified-functional\" (UID: \"a1a92aa0-ac91-4391-949a-fd2bcfa3e714\") " pod="openstack/tobiko-tests-tobiko-s00-podified-functional" Oct 01 17:07:05 crc kubenswrapper[4869]: I1001 17:07:05.506354 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/a1a92aa0-ac91-4391-949a-fd2bcfa3e714-test-operator-ephemeral-temporary\") pod \"tobiko-tests-tobiko-s00-podified-functional\" (UID: \"a1a92aa0-ac91-4391-949a-fd2bcfa3e714\") " pod="openstack/tobiko-tests-tobiko-s00-podified-functional" Oct 01 17:07:05 crc kubenswrapper[4869]: I1001 17:07:05.506666 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tobiko-private-key\" (UniqueName: \"kubernetes.io/configmap/a1a92aa0-ac91-4391-949a-fd2bcfa3e714-tobiko-private-key\") pod \"tobiko-tests-tobiko-s00-podified-functional\" (UID: \"a1a92aa0-ac91-4391-949a-fd2bcfa3e714\") " pod="openstack/tobiko-tests-tobiko-s00-podified-functional" Oct 01 17:07:05 crc kubenswrapper[4869]: I1001 17:07:05.506894 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/a1a92aa0-ac91-4391-949a-fd2bcfa3e714-test-operator-ephemeral-workdir\") pod \"tobiko-tests-tobiko-s00-podified-functional\" (UID: \"a1a92aa0-ac91-4391-949a-fd2bcfa3e714\") " pod="openstack/tobiko-tests-tobiko-s00-podified-functional" Oct 01 17:07:05 crc kubenswrapper[4869]: I1001 17:07:05.508227 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/a1a92aa0-ac91-4391-949a-fd2bcfa3e714-test-operator-ephemeral-workdir\") pod \"tobiko-tests-tobiko-s00-podified-functional\" (UID: \"a1a92aa0-ac91-4391-949a-fd2bcfa3e714\") " pod="openstack/tobiko-tests-tobiko-s00-podified-functional" Oct 01 17:07:05 crc kubenswrapper[4869]: I1001 17:07:05.508330 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/a1a92aa0-ac91-4391-949a-fd2bcfa3e714-test-operator-ephemeral-temporary\") pod \"tobiko-tests-tobiko-s00-podified-functional\" (UID: \"a1a92aa0-ac91-4391-949a-fd2bcfa3e714\") " pod="openstack/tobiko-tests-tobiko-s00-podified-functional" Oct 01 17:07:05 crc kubenswrapper[4869]: I1001 17:07:05.508885 4869 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"tobiko-tests-tobiko-s00-podified-functional\" (UID: \"a1a92aa0-ac91-4391-949a-fd2bcfa3e714\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/tobiko-tests-tobiko-s00-podified-functional" Oct 01 17:07:05 crc kubenswrapper[4869]: I1001 17:07:05.511596 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubeconfig\" (UniqueName: \"kubernetes.io/secret/a1a92aa0-ac91-4391-949a-fd2bcfa3e714-kubeconfig\") pod \"tobiko-tests-tobiko-s00-podified-functional\" (UID: \"a1a92aa0-ac91-4391-949a-fd2bcfa3e714\") " pod="openstack/tobiko-tests-tobiko-s00-podified-functional" Oct 01 17:07:05 crc 
kubenswrapper[4869]: I1001 17:07:05.513966 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/a1a92aa0-ac91-4391-949a-fd2bcfa3e714-ceph\") pod \"tobiko-tests-tobiko-s00-podified-functional\" (UID: \"a1a92aa0-ac91-4391-949a-fd2bcfa3e714\") " pod="openstack/tobiko-tests-tobiko-s00-podified-functional" Oct 01 17:07:05 crc kubenswrapper[4869]: I1001 17:07:05.514547 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/a1a92aa0-ac91-4391-949a-fd2bcfa3e714-ca-certs\") pod \"tobiko-tests-tobiko-s00-podified-functional\" (UID: \"a1a92aa0-ac91-4391-949a-fd2bcfa3e714\") " pod="openstack/tobiko-tests-tobiko-s00-podified-functional" Oct 01 17:07:05 crc kubenswrapper[4869]: I1001 17:07:05.515771 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tobiko-private-key\" (UniqueName: \"kubernetes.io/configmap/a1a92aa0-ac91-4391-949a-fd2bcfa3e714-tobiko-private-key\") pod \"tobiko-tests-tobiko-s00-podified-functional\" (UID: \"a1a92aa0-ac91-4391-949a-fd2bcfa3e714\") " pod="openstack/tobiko-tests-tobiko-s00-podified-functional" Oct 01 17:07:05 crc kubenswrapper[4869]: I1001 17:07:05.516874 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/a1a92aa0-ac91-4391-949a-fd2bcfa3e714-openstack-config-secret\") pod \"tobiko-tests-tobiko-s00-podified-functional\" (UID: \"a1a92aa0-ac91-4391-949a-fd2bcfa3e714\") " pod="openstack/tobiko-tests-tobiko-s00-podified-functional" Oct 01 17:07:05 crc kubenswrapper[4869]: I1001 17:07:05.520558 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-clouds-config\" (UniqueName: \"kubernetes.io/configmap/a1a92aa0-ac91-4391-949a-fd2bcfa3e714-test-operator-clouds-config\") pod \"tobiko-tests-tobiko-s00-podified-functional\" (UID: \"a1a92aa0-ac91-4391-949a-fd2bcfa3e714\") " pod="openstack/tobiko-tests-tobiko-s00-podified-functional" Oct 01 17:07:05 crc kubenswrapper[4869]: I1001 17:07:05.541423 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"tobiko-tests-tobiko-s00-podified-functional\" (UID: \"a1a92aa0-ac91-4391-949a-fd2bcfa3e714\") " pod="openstack/tobiko-tests-tobiko-s00-podified-functional" Oct 01 17:07:05 crc kubenswrapper[4869]: I1001 17:07:05.543689 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p4f4s\" (UniqueName: \"kubernetes.io/projected/a1a92aa0-ac91-4391-949a-fd2bcfa3e714-kube-api-access-p4f4s\") pod \"tobiko-tests-tobiko-s00-podified-functional\" (UID: \"a1a92aa0-ac91-4391-949a-fd2bcfa3e714\") " pod="openstack/tobiko-tests-tobiko-s00-podified-functional" Oct 01 17:07:05 crc kubenswrapper[4869]: I1001 17:07:05.625844 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/tobiko-tests-tobiko-s00-podified-functional" Oct 01 17:07:06 crc kubenswrapper[4869]: I1001 17:07:06.237248 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tobiko-tests-tobiko-s00-podified-functional"] Oct 01 17:07:06 crc kubenswrapper[4869]: W1001 17:07:06.244790 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda1a92aa0_ac91_4391_949a_fd2bcfa3e714.slice/crio-0efb7b0063aaaa1ae863daf8f335c1c9211e5cb6ef396227bd89461a5de12a01 WatchSource:0}: Error finding container 0efb7b0063aaaa1ae863daf8f335c1c9211e5cb6ef396227bd89461a5de12a01: Status 404 returned error can't find the container with id 0efb7b0063aaaa1ae863daf8f335c1c9211e5cb6ef396227bd89461a5de12a01 Oct 01 17:07:07 crc kubenswrapper[4869]: I1001 17:07:07.121014 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tobiko-tests-tobiko-s00-podified-functional" event={"ID":"a1a92aa0-ac91-4391-949a-fd2bcfa3e714","Type":"ContainerStarted","Data":"0efb7b0063aaaa1ae863daf8f335c1c9211e5cb6ef396227bd89461a5de12a01"} Oct 01 17:07:23 crc kubenswrapper[4869]: E1001 17:07:23.820508 4869 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-tobiko:current-podified" Oct 01 17:07:23 crc kubenswrapper[4869]: E1001 17:07:23.821589 4869 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:tobiko-tests-tobiko,Image:quay.io/podified-antelope-centos9/openstack-tobiko:current-podified,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:TOBIKO_DEBUG_MODE,Value:false,ValueFrom:nil,},EnvVar{Name:TOBIKO_KEYS_FOLDER,Value:/etc/test_operator,ValueFrom:nil,},EnvVar{Name:TOBIKO_LOGS_DIR_NAME,Value:tobiko-tests-tobiko-s00-podified-functional,ValueFrom:nil,},EnvVar{Name:TOBIKO_PYTEST_ADDOPTS,Value:,ValueFrom:nil,},EnvVar{Name:TOBIKO_TESTENV,Value:functional -- tobiko/tests/functional/podified/test_topology.py,ValueFrom:nil,},EnvVar{Name:TOBIKO_VERSION,Value:master,ValueFrom:nil,},EnvVar{Name:TOX_NUM_PROCESSES,Value:2,ValueFrom:nil,},EnvVar{Name:USE_EXTERNAL_FILES,Value:True,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{8 0} {} 8 DecimalSI},memory: {{8589934592 0} {} BinarySI},},Requests:ResourceList{cpu: {{4 0} {} 4 DecimalSI},memory: {{4294967296 0} {} 4Gi 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:test-operator-ephemeral-workdir,ReadOnly:false,MountPath:/var/lib/tobiko,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-ephemeral-temporary,ReadOnly:false,MountPath:/tmp,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-logs,ReadOnly:false,MountPath:/var/lib/tobiko/external_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-clouds-config,ReadOnly:true,MountPath:/var/lib/tobiko/.config/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-clouds-config,ReadOnly:true,MountPath:/etc/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config-secret,ReadOnly:false,MountPath:/etc/openstack/secure.yaml,SubPath:secure.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:tobiko-config,ReadOnly:false,MountPath:/etc/tobiko/tobiko.conf,SubPath:tobiko.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ca-certs,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ca-certs,ReadOnly:true,MountPath:/etc/pki/tls/certs/ca-bundle.trust.crt,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:tobiko-private-key,ReadOnly:true,MountPath:/etc/test_operator/id_ecdsa,SubPath:id_ecdsa,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:tobiko-public-key,ReadOnly:true,MountPath:/etc/test_operator/id_ecdsa.pub,SubPath:id_ecdsa.pub,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kubeconfig,ReadOnly:true,MountPath:/var/lib/tobiko/.kube/config,SubPath:config,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ceph,ReadOnly:true,MountPath:/etc/ceph,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-p4f4s,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[NET_ADMIN NET_RAW],Drop:[],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42495,RunAsNonRoot:*false,ReadOnlyRootFilesystem:*false,AllowPrivilegeEscalation:*true,RunAsGroup:*42495,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod tobiko-tests-tobiko-s00-podified-functional_openstack(a1a92aa0-ac91-4391-949a-fd2bcfa3e714): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 01 17:07:23 crc kubenswrapper[4869]: E1001 17:07:23.824656 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tobiko-tests-tobiko\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context 
canceled\"" pod="openstack/tobiko-tests-tobiko-s00-podified-functional" podUID="a1a92aa0-ac91-4391-949a-fd2bcfa3e714" Oct 01 17:07:24 crc kubenswrapper[4869]: E1001 17:07:24.300704 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tobiko-tests-tobiko\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-tobiko:current-podified\\\"\"" pod="openstack/tobiko-tests-tobiko-s00-podified-functional" podUID="a1a92aa0-ac91-4391-949a-fd2bcfa3e714" Oct 01 17:07:37 crc kubenswrapper[4869]: I1001 17:07:37.439076 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tobiko-tests-tobiko-s00-podified-functional" event={"ID":"a1a92aa0-ac91-4391-949a-fd2bcfa3e714","Type":"ContainerStarted","Data":"fe1a0d7edb6855ad5ecd4f16e5fadce2028b45ec423a31d57dfc1246a2db468b"} Oct 01 17:07:37 crc kubenswrapper[4869]: I1001 17:07:37.479006 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/tobiko-tests-tobiko-s00-podified-functional" podStartSLOduration=3.3848752380000002 podStartE2EDuration="33.478982963s" podCreationTimestamp="2025-10-01 17:07:04 +0000 UTC" firstStartedPulling="2025-10-01 17:07:06.247310192 +0000 UTC m=+7335.394153308" lastFinishedPulling="2025-10-01 17:07:36.341417907 +0000 UTC m=+7365.488261033" observedRunningTime="2025-10-01 17:07:37.465909653 +0000 UTC m=+7366.612752789" watchObservedRunningTime="2025-10-01 17:07:37.478982963 +0000 UTC m=+7366.625826089" Oct 01 17:08:15 crc kubenswrapper[4869]: I1001 17:08:15.195578 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-fvcp4"] Oct 01 17:08:15 crc kubenswrapper[4869]: I1001 17:08:15.200470 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-fvcp4" Oct 01 17:08:15 crc kubenswrapper[4869]: I1001 17:08:15.216420 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-fvcp4"] Oct 01 17:08:15 crc kubenswrapper[4869]: I1001 17:08:15.297316 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s4t4w\" (UniqueName: \"kubernetes.io/projected/55c5fb4b-2317-4f41-829f-4a48c35b9caa-kube-api-access-s4t4w\") pod \"redhat-operators-fvcp4\" (UID: \"55c5fb4b-2317-4f41-829f-4a48c35b9caa\") " pod="openshift-marketplace/redhat-operators-fvcp4" Oct 01 17:08:15 crc kubenswrapper[4869]: I1001 17:08:15.297432 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/55c5fb4b-2317-4f41-829f-4a48c35b9caa-utilities\") pod \"redhat-operators-fvcp4\" (UID: \"55c5fb4b-2317-4f41-829f-4a48c35b9caa\") " pod="openshift-marketplace/redhat-operators-fvcp4" Oct 01 17:08:15 crc kubenswrapper[4869]: I1001 17:08:15.297739 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/55c5fb4b-2317-4f41-829f-4a48c35b9caa-catalog-content\") pod \"redhat-operators-fvcp4\" (UID: \"55c5fb4b-2317-4f41-829f-4a48c35b9caa\") " pod="openshift-marketplace/redhat-operators-fvcp4" Oct 01 17:08:15 crc kubenswrapper[4869]: I1001 17:08:15.399514 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/55c5fb4b-2317-4f41-829f-4a48c35b9caa-utilities\") pod \"redhat-operators-fvcp4\" (UID: \"55c5fb4b-2317-4f41-829f-4a48c35b9caa\") " pod="openshift-marketplace/redhat-operators-fvcp4" Oct 01 17:08:15 crc kubenswrapper[4869]: I1001 17:08:15.399634 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/55c5fb4b-2317-4f41-829f-4a48c35b9caa-catalog-content\") pod \"redhat-operators-fvcp4\" (UID: \"55c5fb4b-2317-4f41-829f-4a48c35b9caa\") " pod="openshift-marketplace/redhat-operators-fvcp4" Oct 01 17:08:15 crc kubenswrapper[4869]: I1001 17:08:15.399720 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s4t4w\" (UniqueName: \"kubernetes.io/projected/55c5fb4b-2317-4f41-829f-4a48c35b9caa-kube-api-access-s4t4w\") pod \"redhat-operators-fvcp4\" (UID: \"55c5fb4b-2317-4f41-829f-4a48c35b9caa\") " pod="openshift-marketplace/redhat-operators-fvcp4" Oct 01 17:08:15 crc kubenswrapper[4869]: I1001 17:08:15.400083 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/55c5fb4b-2317-4f41-829f-4a48c35b9caa-utilities\") pod \"redhat-operators-fvcp4\" (UID: \"55c5fb4b-2317-4f41-829f-4a48c35b9caa\") " pod="openshift-marketplace/redhat-operators-fvcp4" Oct 01 17:08:15 crc kubenswrapper[4869]: I1001 17:08:15.400201 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/55c5fb4b-2317-4f41-829f-4a48c35b9caa-catalog-content\") pod \"redhat-operators-fvcp4\" (UID: \"55c5fb4b-2317-4f41-829f-4a48c35b9caa\") " pod="openshift-marketplace/redhat-operators-fvcp4" Oct 01 17:08:15 crc kubenswrapper[4869]: I1001 17:08:15.420244 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-s4t4w\" (UniqueName: \"kubernetes.io/projected/55c5fb4b-2317-4f41-829f-4a48c35b9caa-kube-api-access-s4t4w\") pod \"redhat-operators-fvcp4\" (UID: \"55c5fb4b-2317-4f41-829f-4a48c35b9caa\") " pod="openshift-marketplace/redhat-operators-fvcp4" Oct 01 17:08:15 crc kubenswrapper[4869]: I1001 17:08:15.524416 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-fvcp4" Oct 01 17:08:16 crc kubenswrapper[4869]: I1001 17:08:16.044443 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-fvcp4"] Oct 01 17:08:16 crc kubenswrapper[4869]: I1001 17:08:16.906631 4869 generic.go:334] "Generic (PLEG): container finished" podID="55c5fb4b-2317-4f41-829f-4a48c35b9caa" containerID="e2011eb139e053d26ad69414557adca117f2bfb08091e3a358c30fb35067770d" exitCode=0 Oct 01 17:08:16 crc kubenswrapper[4869]: I1001 17:08:16.906749 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fvcp4" event={"ID":"55c5fb4b-2317-4f41-829f-4a48c35b9caa","Type":"ContainerDied","Data":"e2011eb139e053d26ad69414557adca117f2bfb08091e3a358c30fb35067770d"} Oct 01 17:08:16 crc kubenswrapper[4869]: I1001 17:08:16.906988 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fvcp4" event={"ID":"55c5fb4b-2317-4f41-829f-4a48c35b9caa","Type":"ContainerStarted","Data":"b4964df1b2736954e53a77ac14b2227bc8b7470702984e339924f43386f6feb8"} Oct 01 17:08:17 crc kubenswrapper[4869]: I1001 17:08:17.928136 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fvcp4" event={"ID":"55c5fb4b-2317-4f41-829f-4a48c35b9caa","Type":"ContainerStarted","Data":"e7c8145a15579746502d145e67a5830844f61346d8d6796105ff3cf49daec095"} Oct 01 17:08:18 crc kubenswrapper[4869]: I1001 17:08:18.940613 4869 generic.go:334] "Generic (PLEG): container finished" podID="55c5fb4b-2317-4f41-829f-4a48c35b9caa" containerID="e7c8145a15579746502d145e67a5830844f61346d8d6796105ff3cf49daec095" exitCode=0 Oct 01 17:08:18 crc kubenswrapper[4869]: I1001 17:08:18.940975 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fvcp4" event={"ID":"55c5fb4b-2317-4f41-829f-4a48c35b9caa","Type":"ContainerDied","Data":"e7c8145a15579746502d145e67a5830844f61346d8d6796105ff3cf49daec095"} Oct 01 17:08:20 crc kubenswrapper[4869]: I1001 17:08:20.968625 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fvcp4" event={"ID":"55c5fb4b-2317-4f41-829f-4a48c35b9caa","Type":"ContainerStarted","Data":"cdd63245f334604ad097b546e7cb2eb0ada90a422653657a5887a7e9c8be66dd"} Oct 01 17:08:21 crc kubenswrapper[4869]: I1001 17:08:21.000392 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-fvcp4" podStartSLOduration=3.24492748 podStartE2EDuration="6.00036695s" podCreationTimestamp="2025-10-01 17:08:15 +0000 UTC" firstStartedPulling="2025-10-01 17:08:16.908848511 +0000 UTC m=+7406.055691647" lastFinishedPulling="2025-10-01 17:08:19.664287961 +0000 UTC m=+7408.811131117" observedRunningTime="2025-10-01 17:08:20.985376612 +0000 UTC m=+7410.132219818" watchObservedRunningTime="2025-10-01 17:08:21.00036695 +0000 UTC m=+7410.147210106" Oct 01 17:08:25 crc kubenswrapper[4869]: I1001 17:08:25.524949 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-fvcp4" Oct 01 
17:08:25 crc kubenswrapper[4869]: I1001 17:08:25.526439 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-fvcp4" Oct 01 17:08:25 crc kubenswrapper[4869]: I1001 17:08:25.610929 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-fvcp4" Oct 01 17:08:26 crc kubenswrapper[4869]: I1001 17:08:26.117857 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-fvcp4" Oct 01 17:08:26 crc kubenswrapper[4869]: I1001 17:08:26.196979 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-fvcp4"] Oct 01 17:08:28 crc kubenswrapper[4869]: I1001 17:08:28.042572 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-fvcp4" podUID="55c5fb4b-2317-4f41-829f-4a48c35b9caa" containerName="registry-server" containerID="cri-o://cdd63245f334604ad097b546e7cb2eb0ada90a422653657a5887a7e9c8be66dd" gracePeriod=2 Oct 01 17:08:28 crc kubenswrapper[4869]: I1001 17:08:28.519781 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-fvcp4" Oct 01 17:08:28 crc kubenswrapper[4869]: I1001 17:08:28.596775 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/55c5fb4b-2317-4f41-829f-4a48c35b9caa-catalog-content\") pod \"55c5fb4b-2317-4f41-829f-4a48c35b9caa\" (UID: \"55c5fb4b-2317-4f41-829f-4a48c35b9caa\") " Oct 01 17:08:28 crc kubenswrapper[4869]: I1001 17:08:28.596980 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/55c5fb4b-2317-4f41-829f-4a48c35b9caa-utilities\") pod \"55c5fb4b-2317-4f41-829f-4a48c35b9caa\" (UID: \"55c5fb4b-2317-4f41-829f-4a48c35b9caa\") " Oct 01 17:08:28 crc kubenswrapper[4869]: I1001 17:08:28.597901 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/55c5fb4b-2317-4f41-829f-4a48c35b9caa-utilities" (OuterVolumeSpecName: "utilities") pod "55c5fb4b-2317-4f41-829f-4a48c35b9caa" (UID: "55c5fb4b-2317-4f41-829f-4a48c35b9caa"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 17:08:28 crc kubenswrapper[4869]: I1001 17:08:28.597987 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4t4w\" (UniqueName: \"kubernetes.io/projected/55c5fb4b-2317-4f41-829f-4a48c35b9caa-kube-api-access-s4t4w\") pod \"55c5fb4b-2317-4f41-829f-4a48c35b9caa\" (UID: \"55c5fb4b-2317-4f41-829f-4a48c35b9caa\") " Oct 01 17:08:28 crc kubenswrapper[4869]: I1001 17:08:28.599114 4869 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/55c5fb4b-2317-4f41-829f-4a48c35b9caa-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 17:08:28 crc kubenswrapper[4869]: I1001 17:08:28.602794 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/55c5fb4b-2317-4f41-829f-4a48c35b9caa-kube-api-access-s4t4w" (OuterVolumeSpecName: "kube-api-access-s4t4w") pod "55c5fb4b-2317-4f41-829f-4a48c35b9caa" (UID: "55c5fb4b-2317-4f41-829f-4a48c35b9caa"). InnerVolumeSpecName "kube-api-access-s4t4w". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 17:08:28 crc kubenswrapper[4869]: I1001 17:08:28.684616 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/55c5fb4b-2317-4f41-829f-4a48c35b9caa-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "55c5fb4b-2317-4f41-829f-4a48c35b9caa" (UID: "55c5fb4b-2317-4f41-829f-4a48c35b9caa"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 17:08:28 crc kubenswrapper[4869]: I1001 17:08:28.701514 4869 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/55c5fb4b-2317-4f41-829f-4a48c35b9caa-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 17:08:28 crc kubenswrapper[4869]: I1001 17:08:28.701657 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4t4w\" (UniqueName: \"kubernetes.io/projected/55c5fb4b-2317-4f41-829f-4a48c35b9caa-kube-api-access-s4t4w\") on node \"crc\" DevicePath \"\"" Oct 01 17:08:29 crc kubenswrapper[4869]: I1001 17:08:29.056314 4869 generic.go:334] "Generic (PLEG): container finished" podID="55c5fb4b-2317-4f41-829f-4a48c35b9caa" containerID="cdd63245f334604ad097b546e7cb2eb0ada90a422653657a5887a7e9c8be66dd" exitCode=0 Oct 01 17:08:29 crc kubenswrapper[4869]: I1001 17:08:29.056378 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fvcp4" event={"ID":"55c5fb4b-2317-4f41-829f-4a48c35b9caa","Type":"ContainerDied","Data":"cdd63245f334604ad097b546e7cb2eb0ada90a422653657a5887a7e9c8be66dd"} Oct 01 17:08:29 crc kubenswrapper[4869]: I1001 17:08:29.056431 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fvcp4" event={"ID":"55c5fb4b-2317-4f41-829f-4a48c35b9caa","Type":"ContainerDied","Data":"b4964df1b2736954e53a77ac14b2227bc8b7470702984e339924f43386f6feb8"} Oct 01 17:08:29 crc kubenswrapper[4869]: I1001 17:08:29.056453 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-fvcp4" Oct 01 17:08:29 crc kubenswrapper[4869]: I1001 17:08:29.056469 4869 scope.go:117] "RemoveContainer" containerID="cdd63245f334604ad097b546e7cb2eb0ada90a422653657a5887a7e9c8be66dd" Oct 01 17:08:29 crc kubenswrapper[4869]: I1001 17:08:29.107702 4869 scope.go:117] "RemoveContainer" containerID="e7c8145a15579746502d145e67a5830844f61346d8d6796105ff3cf49daec095" Oct 01 17:08:29 crc kubenswrapper[4869]: I1001 17:08:29.112082 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-fvcp4"] Oct 01 17:08:29 crc kubenswrapper[4869]: I1001 17:08:29.125575 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-fvcp4"] Oct 01 17:08:29 crc kubenswrapper[4869]: I1001 17:08:29.145510 4869 scope.go:117] "RemoveContainer" containerID="e2011eb139e053d26ad69414557adca117f2bfb08091e3a358c30fb35067770d" Oct 01 17:08:29 crc kubenswrapper[4869]: I1001 17:08:29.204036 4869 scope.go:117] "RemoveContainer" containerID="cdd63245f334604ad097b546e7cb2eb0ada90a422653657a5887a7e9c8be66dd" Oct 01 17:08:29 crc kubenswrapper[4869]: E1001 17:08:29.204488 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cdd63245f334604ad097b546e7cb2eb0ada90a422653657a5887a7e9c8be66dd\": container with ID starting with cdd63245f334604ad097b546e7cb2eb0ada90a422653657a5887a7e9c8be66dd not found: ID does not exist" containerID="cdd63245f334604ad097b546e7cb2eb0ada90a422653657a5887a7e9c8be66dd" Oct 01 17:08:29 crc kubenswrapper[4869]: I1001 17:08:29.204546 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cdd63245f334604ad097b546e7cb2eb0ada90a422653657a5887a7e9c8be66dd"} err="failed to get container status \"cdd63245f334604ad097b546e7cb2eb0ada90a422653657a5887a7e9c8be66dd\": rpc error: code = NotFound desc = could not find container \"cdd63245f334604ad097b546e7cb2eb0ada90a422653657a5887a7e9c8be66dd\": container with ID starting with cdd63245f334604ad097b546e7cb2eb0ada90a422653657a5887a7e9c8be66dd not found: ID does not exist" Oct 01 17:08:29 crc kubenswrapper[4869]: I1001 17:08:29.204580 4869 scope.go:117] "RemoveContainer" containerID="e7c8145a15579746502d145e67a5830844f61346d8d6796105ff3cf49daec095" Oct 01 17:08:29 crc kubenswrapper[4869]: E1001 17:08:29.205018 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e7c8145a15579746502d145e67a5830844f61346d8d6796105ff3cf49daec095\": container with ID starting with e7c8145a15579746502d145e67a5830844f61346d8d6796105ff3cf49daec095 not found: ID does not exist" containerID="e7c8145a15579746502d145e67a5830844f61346d8d6796105ff3cf49daec095" Oct 01 17:08:29 crc kubenswrapper[4869]: I1001 17:08:29.205195 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e7c8145a15579746502d145e67a5830844f61346d8d6796105ff3cf49daec095"} err="failed to get container status \"e7c8145a15579746502d145e67a5830844f61346d8d6796105ff3cf49daec095\": rpc error: code = NotFound desc = could not find container \"e7c8145a15579746502d145e67a5830844f61346d8d6796105ff3cf49daec095\": container with ID starting with e7c8145a15579746502d145e67a5830844f61346d8d6796105ff3cf49daec095 not found: ID does not exist" Oct 01 17:08:29 crc kubenswrapper[4869]: I1001 17:08:29.205426 4869 scope.go:117] "RemoveContainer" 
containerID="e2011eb139e053d26ad69414557adca117f2bfb08091e3a358c30fb35067770d" Oct 01 17:08:29 crc kubenswrapper[4869]: E1001 17:08:29.205857 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e2011eb139e053d26ad69414557adca117f2bfb08091e3a358c30fb35067770d\": container with ID starting with e2011eb139e053d26ad69414557adca117f2bfb08091e3a358c30fb35067770d not found: ID does not exist" containerID="e2011eb139e053d26ad69414557adca117f2bfb08091e3a358c30fb35067770d" Oct 01 17:08:29 crc kubenswrapper[4869]: I1001 17:08:29.205889 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e2011eb139e053d26ad69414557adca117f2bfb08091e3a358c30fb35067770d"} err="failed to get container status \"e2011eb139e053d26ad69414557adca117f2bfb08091e3a358c30fb35067770d\": rpc error: code = NotFound desc = could not find container \"e2011eb139e053d26ad69414557adca117f2bfb08091e3a358c30fb35067770d\": container with ID starting with e2011eb139e053d26ad69414557adca117f2bfb08091e3a358c30fb35067770d not found: ID does not exist" Oct 01 17:08:29 crc kubenswrapper[4869]: I1001 17:08:29.596764 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="55c5fb4b-2317-4f41-829f-4a48c35b9caa" path="/var/lib/kubelet/pods/55c5fb4b-2317-4f41-829f-4a48c35b9caa/volumes" Oct 01 17:08:43 crc kubenswrapper[4869]: I1001 17:08:43.354071 4869 patch_prober.go:28] interesting pod/machine-config-daemon-c86m8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 17:08:43 crc kubenswrapper[4869]: I1001 17:08:43.354855 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 17:09:11 crc kubenswrapper[4869]: I1001 17:09:11.503320 4869 generic.go:334] "Generic (PLEG): container finished" podID="a1a92aa0-ac91-4391-949a-fd2bcfa3e714" containerID="fe1a0d7edb6855ad5ecd4f16e5fadce2028b45ec423a31d57dfc1246a2db468b" exitCode=1 Oct 01 17:09:11 crc kubenswrapper[4869]: I1001 17:09:11.503404 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tobiko-tests-tobiko-s00-podified-functional" event={"ID":"a1a92aa0-ac91-4391-949a-fd2bcfa3e714","Type":"ContainerDied","Data":"fe1a0d7edb6855ad5ecd4f16e5fadce2028b45ec423a31d57dfc1246a2db468b"} Oct 01 17:09:12 crc kubenswrapper[4869]: I1001 17:09:12.946578 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/tobiko-tests-tobiko-s00-podified-functional" Oct 01 17:09:13 crc kubenswrapper[4869]: I1001 17:09:13.006564 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/a1a92aa0-ac91-4391-949a-fd2bcfa3e714-openstack-config-secret\") pod \"a1a92aa0-ac91-4391-949a-fd2bcfa3e714\" (UID: \"a1a92aa0-ac91-4391-949a-fd2bcfa3e714\") " Oct 01 17:09:13 crc kubenswrapper[4869]: I1001 17:09:13.006625 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubeconfig\" (UniqueName: \"kubernetes.io/secret/a1a92aa0-ac91-4391-949a-fd2bcfa3e714-kubeconfig\") pod \"a1a92aa0-ac91-4391-949a-fd2bcfa3e714\" (UID: \"a1a92aa0-ac91-4391-949a-fd2bcfa3e714\") " Oct 01 17:09:13 crc kubenswrapper[4869]: I1001 17:09:13.006704 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/a1a92aa0-ac91-4391-949a-fd2bcfa3e714-test-operator-ephemeral-temporary\") pod \"a1a92aa0-ac91-4391-949a-fd2bcfa3e714\" (UID: \"a1a92aa0-ac91-4391-949a-fd2bcfa3e714\") " Oct 01 17:09:13 crc kubenswrapper[4869]: I1001 17:09:13.006751 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tobiko-config\" (UniqueName: \"kubernetes.io/configmap/a1a92aa0-ac91-4391-949a-fd2bcfa3e714-tobiko-config\") pod \"a1a92aa0-ac91-4391-949a-fd2bcfa3e714\" (UID: \"a1a92aa0-ac91-4391-949a-fd2bcfa3e714\") " Oct 01 17:09:13 crc kubenswrapper[4869]: I1001 17:09:13.006795 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/a1a92aa0-ac91-4391-949a-fd2bcfa3e714-test-operator-ephemeral-workdir\") pod \"a1a92aa0-ac91-4391-949a-fd2bcfa3e714\" (UID: \"a1a92aa0-ac91-4391-949a-fd2bcfa3e714\") " Oct 01 17:09:13 crc kubenswrapper[4869]: I1001 17:09:13.006874 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p4f4s\" (UniqueName: \"kubernetes.io/projected/a1a92aa0-ac91-4391-949a-fd2bcfa3e714-kube-api-access-p4f4s\") pod \"a1a92aa0-ac91-4391-949a-fd2bcfa3e714\" (UID: \"a1a92aa0-ac91-4391-949a-fd2bcfa3e714\") " Oct 01 17:09:13 crc kubenswrapper[4869]: I1001 17:09:13.006916 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-logs\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"a1a92aa0-ac91-4391-949a-fd2bcfa3e714\" (UID: \"a1a92aa0-ac91-4391-949a-fd2bcfa3e714\") " Oct 01 17:09:13 crc kubenswrapper[4869]: I1001 17:09:13.006963 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tobiko-public-key\" (UniqueName: \"kubernetes.io/configmap/a1a92aa0-ac91-4391-949a-fd2bcfa3e714-tobiko-public-key\") pod \"a1a92aa0-ac91-4391-949a-fd2bcfa3e714\" (UID: \"a1a92aa0-ac91-4391-949a-fd2bcfa3e714\") " Oct 01 17:09:13 crc kubenswrapper[4869]: I1001 17:09:13.006990 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/a1a92aa0-ac91-4391-949a-fd2bcfa3e714-ca-certs\") pod \"a1a92aa0-ac91-4391-949a-fd2bcfa3e714\" (UID: \"a1a92aa0-ac91-4391-949a-fd2bcfa3e714\") " Oct 01 17:09:13 crc kubenswrapper[4869]: I1001 17:09:13.007055 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tobiko-private-key\" (UniqueName: 
\"kubernetes.io/configmap/a1a92aa0-ac91-4391-949a-fd2bcfa3e714-tobiko-private-key\") pod \"a1a92aa0-ac91-4391-949a-fd2bcfa3e714\" (UID: \"a1a92aa0-ac91-4391-949a-fd2bcfa3e714\") " Oct 01 17:09:13 crc kubenswrapper[4869]: I1001 17:09:13.007164 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/a1a92aa0-ac91-4391-949a-fd2bcfa3e714-ceph\") pod \"a1a92aa0-ac91-4391-949a-fd2bcfa3e714\" (UID: \"a1a92aa0-ac91-4391-949a-fd2bcfa3e714\") " Oct 01 17:09:13 crc kubenswrapper[4869]: I1001 17:09:13.007309 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-clouds-config\" (UniqueName: \"kubernetes.io/configmap/a1a92aa0-ac91-4391-949a-fd2bcfa3e714-test-operator-clouds-config\") pod \"a1a92aa0-ac91-4391-949a-fd2bcfa3e714\" (UID: \"a1a92aa0-ac91-4391-949a-fd2bcfa3e714\") " Oct 01 17:09:13 crc kubenswrapper[4869]: I1001 17:09:13.008183 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a1a92aa0-ac91-4391-949a-fd2bcfa3e714-test-operator-ephemeral-temporary" (OuterVolumeSpecName: "test-operator-ephemeral-temporary") pod "a1a92aa0-ac91-4391-949a-fd2bcfa3e714" (UID: "a1a92aa0-ac91-4391-949a-fd2bcfa3e714"). InnerVolumeSpecName "test-operator-ephemeral-temporary". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 17:09:13 crc kubenswrapper[4869]: I1001 17:09:13.009206 4869 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/a1a92aa0-ac91-4391-949a-fd2bcfa3e714-test-operator-ephemeral-temporary\") on node \"crc\" DevicePath \"\"" Oct 01 17:09:13 crc kubenswrapper[4869]: I1001 17:09:13.015403 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage07-crc" (OuterVolumeSpecName: "test-operator-logs") pod "a1a92aa0-ac91-4391-949a-fd2bcfa3e714" (UID: "a1a92aa0-ac91-4391-949a-fd2bcfa3e714"). InnerVolumeSpecName "local-storage07-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 01 17:09:13 crc kubenswrapper[4869]: I1001 17:09:13.017510 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a1a92aa0-ac91-4391-949a-fd2bcfa3e714-kube-api-access-p4f4s" (OuterVolumeSpecName: "kube-api-access-p4f4s") pod "a1a92aa0-ac91-4391-949a-fd2bcfa3e714" (UID: "a1a92aa0-ac91-4391-949a-fd2bcfa3e714"). InnerVolumeSpecName "kube-api-access-p4f4s". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 17:09:13 crc kubenswrapper[4869]: I1001 17:09:13.032137 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a1a92aa0-ac91-4391-949a-fd2bcfa3e714-ceph" (OuterVolumeSpecName: "ceph") pod "a1a92aa0-ac91-4391-949a-fd2bcfa3e714" (UID: "a1a92aa0-ac91-4391-949a-fd2bcfa3e714"). InnerVolumeSpecName "ceph". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 17:09:13 crc kubenswrapper[4869]: I1001 17:09:13.077814 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/tobiko-tests-tobiko-s01-sanity"] Oct 01 17:09:13 crc kubenswrapper[4869]: E1001 17:09:13.083554 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="55c5fb4b-2317-4f41-829f-4a48c35b9caa" containerName="extract-utilities" Oct 01 17:09:13 crc kubenswrapper[4869]: I1001 17:09:13.083599 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="55c5fb4b-2317-4f41-829f-4a48c35b9caa" containerName="extract-utilities" Oct 01 17:09:13 crc kubenswrapper[4869]: E1001 17:09:13.083674 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="55c5fb4b-2317-4f41-829f-4a48c35b9caa" containerName="extract-content" Oct 01 17:09:13 crc kubenswrapper[4869]: I1001 17:09:13.083689 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="55c5fb4b-2317-4f41-829f-4a48c35b9caa" containerName="extract-content" Oct 01 17:09:13 crc kubenswrapper[4869]: E1001 17:09:13.083706 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="55c5fb4b-2317-4f41-829f-4a48c35b9caa" containerName="registry-server" Oct 01 17:09:13 crc kubenswrapper[4869]: I1001 17:09:13.083714 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="55c5fb4b-2317-4f41-829f-4a48c35b9caa" containerName="registry-server" Oct 01 17:09:13 crc kubenswrapper[4869]: E1001 17:09:13.083743 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a1a92aa0-ac91-4391-949a-fd2bcfa3e714" containerName="tobiko-tests-tobiko" Oct 01 17:09:13 crc kubenswrapper[4869]: I1001 17:09:13.083755 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="a1a92aa0-ac91-4391-949a-fd2bcfa3e714" containerName="tobiko-tests-tobiko" Oct 01 17:09:13 crc kubenswrapper[4869]: I1001 17:09:13.084415 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="55c5fb4b-2317-4f41-829f-4a48c35b9caa" containerName="registry-server" Oct 01 17:09:13 crc kubenswrapper[4869]: I1001 17:09:13.084469 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="a1a92aa0-ac91-4391-949a-fd2bcfa3e714" containerName="tobiko-tests-tobiko" Oct 01 17:09:13 crc kubenswrapper[4869]: I1001 17:09:13.088565 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/tobiko-tests-tobiko-s01-sanity" Oct 01 17:09:13 crc kubenswrapper[4869]: I1001 17:09:13.113365 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tobiko-tests-tobiko-s01-sanity"] Oct 01 17:09:13 crc kubenswrapper[4869]: I1001 17:09:13.115909 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p4f4s\" (UniqueName: \"kubernetes.io/projected/a1a92aa0-ac91-4391-949a-fd2bcfa3e714-kube-api-access-p4f4s\") on node \"crc\" DevicePath \"\"" Oct 01 17:09:13 crc kubenswrapper[4869]: I1001 17:09:13.115974 4869 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" " Oct 01 17:09:13 crc kubenswrapper[4869]: I1001 17:09:13.115989 4869 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/a1a92aa0-ac91-4391-949a-fd2bcfa3e714-ceph\") on node \"crc\" DevicePath \"\"" Oct 01 17:09:13 crc kubenswrapper[4869]: I1001 17:09:13.119824 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a1a92aa0-ac91-4391-949a-fd2bcfa3e714-kubeconfig" (OuterVolumeSpecName: "kubeconfig") pod "a1a92aa0-ac91-4391-949a-fd2bcfa3e714" (UID: "a1a92aa0-ac91-4391-949a-fd2bcfa3e714"). InnerVolumeSpecName "kubeconfig". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 17:09:13 crc kubenswrapper[4869]: I1001 17:09:13.120547 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a1a92aa0-ac91-4391-949a-fd2bcfa3e714-tobiko-private-key" (OuterVolumeSpecName: "tobiko-private-key") pod "a1a92aa0-ac91-4391-949a-fd2bcfa3e714" (UID: "a1a92aa0-ac91-4391-949a-fd2bcfa3e714"). InnerVolumeSpecName "tobiko-private-key". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 17:09:13 crc kubenswrapper[4869]: I1001 17:09:13.123046 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a1a92aa0-ac91-4391-949a-fd2bcfa3e714-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "a1a92aa0-ac91-4391-949a-fd2bcfa3e714" (UID: "a1a92aa0-ac91-4391-949a-fd2bcfa3e714"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 17:09:13 crc kubenswrapper[4869]: I1001 17:09:13.132557 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a1a92aa0-ac91-4391-949a-fd2bcfa3e714-tobiko-config" (OuterVolumeSpecName: "tobiko-config") pod "a1a92aa0-ac91-4391-949a-fd2bcfa3e714" (UID: "a1a92aa0-ac91-4391-949a-fd2bcfa3e714"). InnerVolumeSpecName "tobiko-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 17:09:13 crc kubenswrapper[4869]: I1001 17:09:13.134297 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a1a92aa0-ac91-4391-949a-fd2bcfa3e714-tobiko-public-key" (OuterVolumeSpecName: "tobiko-public-key") pod "a1a92aa0-ac91-4391-949a-fd2bcfa3e714" (UID: "a1a92aa0-ac91-4391-949a-fd2bcfa3e714"). InnerVolumeSpecName "tobiko-public-key". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 17:09:13 crc kubenswrapper[4869]: I1001 17:09:13.146456 4869 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage07-crc" (UniqueName: "kubernetes.io/local-volume/local-storage07-crc") on node "crc" Oct 01 17:09:13 crc kubenswrapper[4869]: I1001 17:09:13.148456 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a1a92aa0-ac91-4391-949a-fd2bcfa3e714-ca-certs" (OuterVolumeSpecName: "ca-certs") pod "a1a92aa0-ac91-4391-949a-fd2bcfa3e714" (UID: "a1a92aa0-ac91-4391-949a-fd2bcfa3e714"). InnerVolumeSpecName "ca-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 17:09:13 crc kubenswrapper[4869]: I1001 17:09:13.163495 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a1a92aa0-ac91-4391-949a-fd2bcfa3e714-test-operator-clouds-config" (OuterVolumeSpecName: "test-operator-clouds-config") pod "a1a92aa0-ac91-4391-949a-fd2bcfa3e714" (UID: "a1a92aa0-ac91-4391-949a-fd2bcfa3e714"). InnerVolumeSpecName "test-operator-clouds-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 17:09:13 crc kubenswrapper[4869]: I1001 17:09:13.218432 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"tobiko-tests-tobiko-s01-sanity\" (UID: \"cc01041e-7f42-4831-bb4d-c663af563735\") " pod="openstack/tobiko-tests-tobiko-s01-sanity" Oct 01 17:09:13 crc kubenswrapper[4869]: I1001 17:09:13.218476 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tobiko-public-key\" (UniqueName: \"kubernetes.io/configmap/cc01041e-7f42-4831-bb4d-c663af563735-tobiko-public-key\") pod \"tobiko-tests-tobiko-s01-sanity\" (UID: \"cc01041e-7f42-4831-bb4d-c663af563735\") " pod="openstack/tobiko-tests-tobiko-s01-sanity" Oct 01 17:09:13 crc kubenswrapper[4869]: I1001 17:09:13.218503 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-clouds-config\" (UniqueName: \"kubernetes.io/configmap/cc01041e-7f42-4831-bb4d-c663af563735-test-operator-clouds-config\") pod \"tobiko-tests-tobiko-s01-sanity\" (UID: \"cc01041e-7f42-4831-bb4d-c663af563735\") " pod="openstack/tobiko-tests-tobiko-s01-sanity" Oct 01 17:09:13 crc kubenswrapper[4869]: I1001 17:09:13.218524 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tobiko-private-key\" (UniqueName: \"kubernetes.io/configmap/cc01041e-7f42-4831-bb4d-c663af563735-tobiko-private-key\") pod \"tobiko-tests-tobiko-s01-sanity\" (UID: \"cc01041e-7f42-4831-bb4d-c663af563735\") " pod="openstack/tobiko-tests-tobiko-s01-sanity" Oct 01 17:09:13 crc kubenswrapper[4869]: I1001 17:09:13.218542 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tobiko-config\" (UniqueName: \"kubernetes.io/configmap/cc01041e-7f42-4831-bb4d-c663af563735-tobiko-config\") pod \"tobiko-tests-tobiko-s01-sanity\" (UID: \"cc01041e-7f42-4831-bb4d-c663af563735\") " pod="openstack/tobiko-tests-tobiko-s01-sanity" Oct 01 17:09:13 crc kubenswrapper[4869]: I1001 17:09:13.218671 4869 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"tobiko-tests-tobiko-s01-sanity\" (UID: 
\"cc01041e-7f42-4831-bb4d-c663af563735\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/tobiko-tests-tobiko-s01-sanity" Oct 01 17:09:13 crc kubenswrapper[4869]: I1001 17:09:13.218692 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/cc01041e-7f42-4831-bb4d-c663af563735-test-operator-ephemeral-temporary\") pod \"tobiko-tests-tobiko-s01-sanity\" (UID: \"cc01041e-7f42-4831-bb4d-c663af563735\") " pod="openstack/tobiko-tests-tobiko-s01-sanity" Oct 01 17:09:13 crc kubenswrapper[4869]: I1001 17:09:13.218767 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubeconfig\" (UniqueName: \"kubernetes.io/secret/cc01041e-7f42-4831-bb4d-c663af563735-kubeconfig\") pod \"tobiko-tests-tobiko-s01-sanity\" (UID: \"cc01041e-7f42-4831-bb4d-c663af563735\") " pod="openstack/tobiko-tests-tobiko-s01-sanity" Oct 01 17:09:13 crc kubenswrapper[4869]: I1001 17:09:13.218969 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/cc01041e-7f42-4831-bb4d-c663af563735-openstack-config-secret\") pod \"tobiko-tests-tobiko-s01-sanity\" (UID: \"cc01041e-7f42-4831-bb4d-c663af563735\") " pod="openstack/tobiko-tests-tobiko-s01-sanity" Oct 01 17:09:13 crc kubenswrapper[4869]: I1001 17:09:13.219067 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/cc01041e-7f42-4831-bb4d-c663af563735-ca-certs\") pod \"tobiko-tests-tobiko-s01-sanity\" (UID: \"cc01041e-7f42-4831-bb4d-c663af563735\") " pod="openstack/tobiko-tests-tobiko-s01-sanity" Oct 01 17:09:13 crc kubenswrapper[4869]: I1001 17:09:13.219084 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/cc01041e-7f42-4831-bb4d-c663af563735-test-operator-ephemeral-workdir\") pod \"tobiko-tests-tobiko-s01-sanity\" (UID: \"cc01041e-7f42-4831-bb4d-c663af563735\") " pod="openstack/tobiko-tests-tobiko-s01-sanity" Oct 01 17:09:13 crc kubenswrapper[4869]: I1001 17:09:13.219751 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/cc01041e-7f42-4831-bb4d-c663af563735-ceph\") pod \"tobiko-tests-tobiko-s01-sanity\" (UID: \"cc01041e-7f42-4831-bb4d-c663af563735\") " pod="openstack/tobiko-tests-tobiko-s01-sanity" Oct 01 17:09:13 crc kubenswrapper[4869]: I1001 17:09:13.219795 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j467t\" (UniqueName: \"kubernetes.io/projected/cc01041e-7f42-4831-bb4d-c663af563735-kube-api-access-j467t\") pod \"tobiko-tests-tobiko-s01-sanity\" (UID: \"cc01041e-7f42-4831-bb4d-c663af563735\") " pod="openstack/tobiko-tests-tobiko-s01-sanity" Oct 01 17:09:13 crc kubenswrapper[4869]: I1001 17:09:13.219963 4869 reconciler_common.go:293] "Volume detached for volume \"tobiko-public-key\" (UniqueName: \"kubernetes.io/configmap/a1a92aa0-ac91-4391-949a-fd2bcfa3e714-tobiko-public-key\") on node \"crc\" DevicePath \"\"" Oct 01 17:09:13 crc kubenswrapper[4869]: I1001 17:09:13.219980 4869 reconciler_common.go:293] "Volume detached for volume \"ca-certs\" (UniqueName: 
\"kubernetes.io/secret/a1a92aa0-ac91-4391-949a-fd2bcfa3e714-ca-certs\") on node \"crc\" DevicePath \"\"" Oct 01 17:09:13 crc kubenswrapper[4869]: I1001 17:09:13.219992 4869 reconciler_common.go:293] "Volume detached for volume \"tobiko-private-key\" (UniqueName: \"kubernetes.io/configmap/a1a92aa0-ac91-4391-949a-fd2bcfa3e714-tobiko-private-key\") on node \"crc\" DevicePath \"\"" Oct 01 17:09:13 crc kubenswrapper[4869]: I1001 17:09:13.220002 4869 reconciler_common.go:293] "Volume detached for volume \"test-operator-clouds-config\" (UniqueName: \"kubernetes.io/configmap/a1a92aa0-ac91-4391-949a-fd2bcfa3e714-test-operator-clouds-config\") on node \"crc\" DevicePath \"\"" Oct 01 17:09:13 crc kubenswrapper[4869]: I1001 17:09:13.220013 4869 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/a1a92aa0-ac91-4391-949a-fd2bcfa3e714-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Oct 01 17:09:13 crc kubenswrapper[4869]: I1001 17:09:13.220022 4869 reconciler_common.go:293] "Volume detached for volume \"kubeconfig\" (UniqueName: \"kubernetes.io/secret/a1a92aa0-ac91-4391-949a-fd2bcfa3e714-kubeconfig\") on node \"crc\" DevicePath \"\"" Oct 01 17:09:13 crc kubenswrapper[4869]: I1001 17:09:13.220031 4869 reconciler_common.go:293] "Volume detached for volume \"tobiko-config\" (UniqueName: \"kubernetes.io/configmap/a1a92aa0-ac91-4391-949a-fd2bcfa3e714-tobiko-config\") on node \"crc\" DevicePath \"\"" Oct 01 17:09:13 crc kubenswrapper[4869]: I1001 17:09:13.243708 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"tobiko-tests-tobiko-s01-sanity\" (UID: \"cc01041e-7f42-4831-bb4d-c663af563735\") " pod="openstack/tobiko-tests-tobiko-s01-sanity" Oct 01 17:09:13 crc kubenswrapper[4869]: I1001 17:09:13.321219 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/cc01041e-7f42-4831-bb4d-c663af563735-test-operator-ephemeral-temporary\") pod \"tobiko-tests-tobiko-s01-sanity\" (UID: \"cc01041e-7f42-4831-bb4d-c663af563735\") " pod="openstack/tobiko-tests-tobiko-s01-sanity" Oct 01 17:09:13 crc kubenswrapper[4869]: I1001 17:09:13.321285 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubeconfig\" (UniqueName: \"kubernetes.io/secret/cc01041e-7f42-4831-bb4d-c663af563735-kubeconfig\") pod \"tobiko-tests-tobiko-s01-sanity\" (UID: \"cc01041e-7f42-4831-bb4d-c663af563735\") " pod="openstack/tobiko-tests-tobiko-s01-sanity" Oct 01 17:09:13 crc kubenswrapper[4869]: I1001 17:09:13.321356 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/cc01041e-7f42-4831-bb4d-c663af563735-openstack-config-secret\") pod \"tobiko-tests-tobiko-s01-sanity\" (UID: \"cc01041e-7f42-4831-bb4d-c663af563735\") " pod="openstack/tobiko-tests-tobiko-s01-sanity" Oct 01 17:09:13 crc kubenswrapper[4869]: I1001 17:09:13.321396 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/cc01041e-7f42-4831-bb4d-c663af563735-ca-certs\") pod \"tobiko-tests-tobiko-s01-sanity\" (UID: \"cc01041e-7f42-4831-bb4d-c663af563735\") " pod="openstack/tobiko-tests-tobiko-s01-sanity" Oct 01 17:09:13 crc kubenswrapper[4869]: I1001 17:09:13.321412 4869 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/cc01041e-7f42-4831-bb4d-c663af563735-test-operator-ephemeral-workdir\") pod \"tobiko-tests-tobiko-s01-sanity\" (UID: \"cc01041e-7f42-4831-bb4d-c663af563735\") " pod="openstack/tobiko-tests-tobiko-s01-sanity" Oct 01 17:09:13 crc kubenswrapper[4869]: I1001 17:09:13.321445 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/cc01041e-7f42-4831-bb4d-c663af563735-ceph\") pod \"tobiko-tests-tobiko-s01-sanity\" (UID: \"cc01041e-7f42-4831-bb4d-c663af563735\") " pod="openstack/tobiko-tests-tobiko-s01-sanity" Oct 01 17:09:13 crc kubenswrapper[4869]: I1001 17:09:13.321463 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j467t\" (UniqueName: \"kubernetes.io/projected/cc01041e-7f42-4831-bb4d-c663af563735-kube-api-access-j467t\") pod \"tobiko-tests-tobiko-s01-sanity\" (UID: \"cc01041e-7f42-4831-bb4d-c663af563735\") " pod="openstack/tobiko-tests-tobiko-s01-sanity" Oct 01 17:09:13 crc kubenswrapper[4869]: I1001 17:09:13.321517 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tobiko-public-key\" (UniqueName: \"kubernetes.io/configmap/cc01041e-7f42-4831-bb4d-c663af563735-tobiko-public-key\") pod \"tobiko-tests-tobiko-s01-sanity\" (UID: \"cc01041e-7f42-4831-bb4d-c663af563735\") " pod="openstack/tobiko-tests-tobiko-s01-sanity" Oct 01 17:09:13 crc kubenswrapper[4869]: I1001 17:09:13.321539 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-clouds-config\" (UniqueName: \"kubernetes.io/configmap/cc01041e-7f42-4831-bb4d-c663af563735-test-operator-clouds-config\") pod \"tobiko-tests-tobiko-s01-sanity\" (UID: \"cc01041e-7f42-4831-bb4d-c663af563735\") " pod="openstack/tobiko-tests-tobiko-s01-sanity" Oct 01 17:09:13 crc kubenswrapper[4869]: I1001 17:09:13.321555 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tobiko-private-key\" (UniqueName: \"kubernetes.io/configmap/cc01041e-7f42-4831-bb4d-c663af563735-tobiko-private-key\") pod \"tobiko-tests-tobiko-s01-sanity\" (UID: \"cc01041e-7f42-4831-bb4d-c663af563735\") " pod="openstack/tobiko-tests-tobiko-s01-sanity" Oct 01 17:09:13 crc kubenswrapper[4869]: I1001 17:09:13.321575 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tobiko-config\" (UniqueName: \"kubernetes.io/configmap/cc01041e-7f42-4831-bb4d-c663af563735-tobiko-config\") pod \"tobiko-tests-tobiko-s01-sanity\" (UID: \"cc01041e-7f42-4831-bb4d-c663af563735\") " pod="openstack/tobiko-tests-tobiko-s01-sanity" Oct 01 17:09:13 crc kubenswrapper[4869]: I1001 17:09:13.321813 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/cc01041e-7f42-4831-bb4d-c663af563735-test-operator-ephemeral-temporary\") pod \"tobiko-tests-tobiko-s01-sanity\" (UID: \"cc01041e-7f42-4831-bb4d-c663af563735\") " pod="openstack/tobiko-tests-tobiko-s01-sanity" Oct 01 17:09:13 crc kubenswrapper[4869]: I1001 17:09:13.322332 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tobiko-config\" (UniqueName: \"kubernetes.io/configmap/cc01041e-7f42-4831-bb4d-c663af563735-tobiko-config\") pod \"tobiko-tests-tobiko-s01-sanity\" (UID: \"cc01041e-7f42-4831-bb4d-c663af563735\") " pod="openstack/tobiko-tests-tobiko-s01-sanity" Oct 01 17:09:13 crc 
kubenswrapper[4869]: I1001 17:09:13.322789 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/cc01041e-7f42-4831-bb4d-c663af563735-test-operator-ephemeral-workdir\") pod \"tobiko-tests-tobiko-s01-sanity\" (UID: \"cc01041e-7f42-4831-bb4d-c663af563735\") " pod="openstack/tobiko-tests-tobiko-s01-sanity" Oct 01 17:09:13 crc kubenswrapper[4869]: I1001 17:09:13.323425 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tobiko-private-key\" (UniqueName: \"kubernetes.io/configmap/cc01041e-7f42-4831-bb4d-c663af563735-tobiko-private-key\") pod \"tobiko-tests-tobiko-s01-sanity\" (UID: \"cc01041e-7f42-4831-bb4d-c663af563735\") " pod="openstack/tobiko-tests-tobiko-s01-sanity" Oct 01 17:09:13 crc kubenswrapper[4869]: I1001 17:09:13.323477 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tobiko-public-key\" (UniqueName: \"kubernetes.io/configmap/cc01041e-7f42-4831-bb4d-c663af563735-tobiko-public-key\") pod \"tobiko-tests-tobiko-s01-sanity\" (UID: \"cc01041e-7f42-4831-bb4d-c663af563735\") " pod="openstack/tobiko-tests-tobiko-s01-sanity" Oct 01 17:09:13 crc kubenswrapper[4869]: I1001 17:09:13.323723 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-clouds-config\" (UniqueName: \"kubernetes.io/configmap/cc01041e-7f42-4831-bb4d-c663af563735-test-operator-clouds-config\") pod \"tobiko-tests-tobiko-s01-sanity\" (UID: \"cc01041e-7f42-4831-bb4d-c663af563735\") " pod="openstack/tobiko-tests-tobiko-s01-sanity" Oct 01 17:09:13 crc kubenswrapper[4869]: I1001 17:09:13.328557 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/cc01041e-7f42-4831-bb4d-c663af563735-ceph\") pod \"tobiko-tests-tobiko-s01-sanity\" (UID: \"cc01041e-7f42-4831-bb4d-c663af563735\") " pod="openstack/tobiko-tests-tobiko-s01-sanity" Oct 01 17:09:13 crc kubenswrapper[4869]: I1001 17:09:13.328618 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubeconfig\" (UniqueName: \"kubernetes.io/secret/cc01041e-7f42-4831-bb4d-c663af563735-kubeconfig\") pod \"tobiko-tests-tobiko-s01-sanity\" (UID: \"cc01041e-7f42-4831-bb4d-c663af563735\") " pod="openstack/tobiko-tests-tobiko-s01-sanity" Oct 01 17:09:13 crc kubenswrapper[4869]: I1001 17:09:13.329068 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/cc01041e-7f42-4831-bb4d-c663af563735-openstack-config-secret\") pod \"tobiko-tests-tobiko-s01-sanity\" (UID: \"cc01041e-7f42-4831-bb4d-c663af563735\") " pod="openstack/tobiko-tests-tobiko-s01-sanity" Oct 01 17:09:13 crc kubenswrapper[4869]: I1001 17:09:13.337578 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/cc01041e-7f42-4831-bb4d-c663af563735-ca-certs\") pod \"tobiko-tests-tobiko-s01-sanity\" (UID: \"cc01041e-7f42-4831-bb4d-c663af563735\") " pod="openstack/tobiko-tests-tobiko-s01-sanity" Oct 01 17:09:13 crc kubenswrapper[4869]: I1001 17:09:13.339961 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j467t\" (UniqueName: \"kubernetes.io/projected/cc01041e-7f42-4831-bb4d-c663af563735-kube-api-access-j467t\") pod \"tobiko-tests-tobiko-s01-sanity\" (UID: \"cc01041e-7f42-4831-bb4d-c663af563735\") " pod="openstack/tobiko-tests-tobiko-s01-sanity" Oct 01 17:09:13 crc kubenswrapper[4869]: I1001 
17:09:13.354388 4869 patch_prober.go:28] interesting pod/machine-config-daemon-c86m8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 17:09:13 crc kubenswrapper[4869]: I1001 17:09:13.354576 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 17:09:13 crc kubenswrapper[4869]: I1001 17:09:13.524656 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tobiko-tests-tobiko-s00-podified-functional" event={"ID":"a1a92aa0-ac91-4391-949a-fd2bcfa3e714","Type":"ContainerDied","Data":"0efb7b0063aaaa1ae863daf8f335c1c9211e5cb6ef396227bd89461a5de12a01"} Oct 01 17:09:13 crc kubenswrapper[4869]: I1001 17:09:13.524702 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0efb7b0063aaaa1ae863daf8f335c1c9211e5cb6ef396227bd89461a5de12a01" Oct 01 17:09:13 crc kubenswrapper[4869]: I1001 17:09:13.524744 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/tobiko-tests-tobiko-s00-podified-functional" Oct 01 17:09:13 crc kubenswrapper[4869]: I1001 17:09:13.528532 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/tobiko-tests-tobiko-s01-sanity" Oct 01 17:09:14 crc kubenswrapper[4869]: I1001 17:09:14.051126 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tobiko-tests-tobiko-s01-sanity"] Oct 01 17:09:14 crc kubenswrapper[4869]: I1001 17:09:14.537161 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tobiko-tests-tobiko-s01-sanity" event={"ID":"cc01041e-7f42-4831-bb4d-c663af563735","Type":"ContainerStarted","Data":"c36388d37f80716343ec629f06d3994dc564a0c1be87433e45c0b2000c2b001a"} Oct 01 17:09:15 crc kubenswrapper[4869]: I1001 17:09:15.553252 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tobiko-tests-tobiko-s01-sanity" event={"ID":"cc01041e-7f42-4831-bb4d-c663af563735","Type":"ContainerStarted","Data":"721c2b8ab44f130a8dc6776592b62c3ad9e2b52b8796151d43e2d1e8ac3c5dde"} Oct 01 17:09:15 crc kubenswrapper[4869]: I1001 17:09:15.585494 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/tobiko-tests-tobiko-s01-sanity" podStartSLOduration=3.5854698369999998 podStartE2EDuration="3.585469837s" podCreationTimestamp="2025-10-01 17:09:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 17:09:15.585019906 +0000 UTC m=+7464.731863112" watchObservedRunningTime="2025-10-01 17:09:15.585469837 +0000 UTC m=+7464.732312983" Oct 01 17:09:17 crc kubenswrapper[4869]: I1001 17:09:17.712294 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a1a92aa0-ac91-4391-949a-fd2bcfa3e714-test-operator-ephemeral-workdir" (OuterVolumeSpecName: "test-operator-ephemeral-workdir") pod "a1a92aa0-ac91-4391-949a-fd2bcfa3e714" (UID: "a1a92aa0-ac91-4391-949a-fd2bcfa3e714"). InnerVolumeSpecName "test-operator-ephemeral-workdir". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 17:09:17 crc kubenswrapper[4869]: I1001 17:09:17.737220 4869 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/a1a92aa0-ac91-4391-949a-fd2bcfa3e714-test-operator-ephemeral-workdir\") on node \"crc\" DevicePath \"\"" Oct 01 17:09:43 crc kubenswrapper[4869]: I1001 17:09:43.354586 4869 patch_prober.go:28] interesting pod/machine-config-daemon-c86m8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 17:09:43 crc kubenswrapper[4869]: I1001 17:09:43.355313 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 17:09:43 crc kubenswrapper[4869]: I1001 17:09:43.355370 4869 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" Oct 01 17:09:43 crc kubenswrapper[4869]: I1001 17:09:43.356237 4869 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"63f8525468ce6f604876dfd165c9ad9323212e1d06ef08033a784d7275c08f61"} pod="openshift-machine-config-operator/machine-config-daemon-c86m8" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 01 17:09:43 crc kubenswrapper[4869]: I1001 17:09:43.356340 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" containerID="cri-o://63f8525468ce6f604876dfd165c9ad9323212e1d06ef08033a784d7275c08f61" gracePeriod=600 Oct 01 17:09:43 crc kubenswrapper[4869]: I1001 17:09:43.880755 4869 generic.go:334] "Generic (PLEG): container finished" podID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerID="63f8525468ce6f604876dfd165c9ad9323212e1d06ef08033a784d7275c08f61" exitCode=0 Oct 01 17:09:43 crc kubenswrapper[4869]: I1001 17:09:43.880854 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" event={"ID":"a4b64f7f-0b03-4f47-965b-9fde048b735c","Type":"ContainerDied","Data":"63f8525468ce6f604876dfd165c9ad9323212e1d06ef08033a784d7275c08f61"} Oct 01 17:09:43 crc kubenswrapper[4869]: I1001 17:09:43.881132 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" event={"ID":"a4b64f7f-0b03-4f47-965b-9fde048b735c","Type":"ContainerStarted","Data":"1c17c6f6d4b5b7ea03383059037e4e76a83710badfded0c48fa96960660db929"} Oct 01 17:09:43 crc kubenswrapper[4869]: I1001 17:09:43.881158 4869 scope.go:117] "RemoveContainer" containerID="13e59c672543e057ea5825aec1ce436345edb91e96c5ed564629c2560312008b" Oct 01 17:09:45 crc kubenswrapper[4869]: I1001 17:09:45.892788 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-5j7rl"] Oct 01 17:09:45 crc kubenswrapper[4869]: I1001 17:09:45.896022 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-5j7rl" Oct 01 17:09:45 crc kubenswrapper[4869]: I1001 17:09:45.916147 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-5j7rl"] Oct 01 17:09:45 crc kubenswrapper[4869]: I1001 17:09:45.973511 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f7jbp\" (UniqueName: \"kubernetes.io/projected/6ccd2468-311f-47cf-aff3-e79a32910afb-kube-api-access-f7jbp\") pod \"community-operators-5j7rl\" (UID: \"6ccd2468-311f-47cf-aff3-e79a32910afb\") " pod="openshift-marketplace/community-operators-5j7rl" Oct 01 17:09:45 crc kubenswrapper[4869]: I1001 17:09:45.973648 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6ccd2468-311f-47cf-aff3-e79a32910afb-catalog-content\") pod \"community-operators-5j7rl\" (UID: \"6ccd2468-311f-47cf-aff3-e79a32910afb\") " pod="openshift-marketplace/community-operators-5j7rl" Oct 01 17:09:45 crc kubenswrapper[4869]: I1001 17:09:45.973707 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6ccd2468-311f-47cf-aff3-e79a32910afb-utilities\") pod \"community-operators-5j7rl\" (UID: \"6ccd2468-311f-47cf-aff3-e79a32910afb\") " pod="openshift-marketplace/community-operators-5j7rl" Oct 01 17:09:46 crc kubenswrapper[4869]: I1001 17:09:46.075219 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6ccd2468-311f-47cf-aff3-e79a32910afb-utilities\") pod \"community-operators-5j7rl\" (UID: \"6ccd2468-311f-47cf-aff3-e79a32910afb\") " pod="openshift-marketplace/community-operators-5j7rl" Oct 01 17:09:46 crc kubenswrapper[4869]: I1001 17:09:46.075343 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f7jbp\" (UniqueName: \"kubernetes.io/projected/6ccd2468-311f-47cf-aff3-e79a32910afb-kube-api-access-f7jbp\") pod \"community-operators-5j7rl\" (UID: \"6ccd2468-311f-47cf-aff3-e79a32910afb\") " pod="openshift-marketplace/community-operators-5j7rl" Oct 01 17:09:46 crc kubenswrapper[4869]: I1001 17:09:46.075433 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6ccd2468-311f-47cf-aff3-e79a32910afb-catalog-content\") pod \"community-operators-5j7rl\" (UID: \"6ccd2468-311f-47cf-aff3-e79a32910afb\") " pod="openshift-marketplace/community-operators-5j7rl" Oct 01 17:09:46 crc kubenswrapper[4869]: I1001 17:09:46.075750 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6ccd2468-311f-47cf-aff3-e79a32910afb-utilities\") pod \"community-operators-5j7rl\" (UID: \"6ccd2468-311f-47cf-aff3-e79a32910afb\") " pod="openshift-marketplace/community-operators-5j7rl" Oct 01 17:09:46 crc kubenswrapper[4869]: I1001 17:09:46.075798 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6ccd2468-311f-47cf-aff3-e79a32910afb-catalog-content\") pod \"community-operators-5j7rl\" (UID: \"6ccd2468-311f-47cf-aff3-e79a32910afb\") " pod="openshift-marketplace/community-operators-5j7rl" Oct 01 17:09:46 crc kubenswrapper[4869]: I1001 17:09:46.103444 4869 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-f7jbp\" (UniqueName: \"kubernetes.io/projected/6ccd2468-311f-47cf-aff3-e79a32910afb-kube-api-access-f7jbp\") pod \"community-operators-5j7rl\" (UID: \"6ccd2468-311f-47cf-aff3-e79a32910afb\") " pod="openshift-marketplace/community-operators-5j7rl" Oct 01 17:09:46 crc kubenswrapper[4869]: I1001 17:09:46.244627 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-5j7rl" Oct 01 17:09:46 crc kubenswrapper[4869]: I1001 17:09:46.813714 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-5j7rl"] Oct 01 17:09:46 crc kubenswrapper[4869]: W1001 17:09:46.833618 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6ccd2468_311f_47cf_aff3_e79a32910afb.slice/crio-89070989eb2518e0ea4a719356c4cde0f6570a03b8175682b12c3d2014695772 WatchSource:0}: Error finding container 89070989eb2518e0ea4a719356c4cde0f6570a03b8175682b12c3d2014695772: Status 404 returned error can't find the container with id 89070989eb2518e0ea4a719356c4cde0f6570a03b8175682b12c3d2014695772 Oct 01 17:09:46 crc kubenswrapper[4869]: I1001 17:09:46.911661 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5j7rl" event={"ID":"6ccd2468-311f-47cf-aff3-e79a32910afb","Type":"ContainerStarted","Data":"89070989eb2518e0ea4a719356c4cde0f6570a03b8175682b12c3d2014695772"} Oct 01 17:09:47 crc kubenswrapper[4869]: I1001 17:09:47.932529 4869 generic.go:334] "Generic (PLEG): container finished" podID="6ccd2468-311f-47cf-aff3-e79a32910afb" containerID="5f00e28e1e3051fc5c540e2d2f6d96aed32f7e69a06e0a0d4084c0cdc77012a9" exitCode=0 Oct 01 17:09:47 crc kubenswrapper[4869]: I1001 17:09:47.932607 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5j7rl" event={"ID":"6ccd2468-311f-47cf-aff3-e79a32910afb","Type":"ContainerDied","Data":"5f00e28e1e3051fc5c540e2d2f6d96aed32f7e69a06e0a0d4084c0cdc77012a9"} Oct 01 17:09:49 crc kubenswrapper[4869]: I1001 17:09:49.950981 4869 generic.go:334] "Generic (PLEG): container finished" podID="6ccd2468-311f-47cf-aff3-e79a32910afb" containerID="1e8e5209a1a9a8910c1a2efeb466fea33997c10d5cc17e6afff224a130d92c59" exitCode=0 Oct 01 17:09:49 crc kubenswrapper[4869]: I1001 17:09:49.951083 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5j7rl" event={"ID":"6ccd2468-311f-47cf-aff3-e79a32910afb","Type":"ContainerDied","Data":"1e8e5209a1a9a8910c1a2efeb466fea33997c10d5cc17e6afff224a130d92c59"} Oct 01 17:09:51 crc kubenswrapper[4869]: I1001 17:09:51.974292 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5j7rl" event={"ID":"6ccd2468-311f-47cf-aff3-e79a32910afb","Type":"ContainerStarted","Data":"8d95af306b4939178548aac266da1e7d4a93e87d640d290c412114ab81aa7a06"} Oct 01 17:09:52 crc kubenswrapper[4869]: I1001 17:09:52.005010 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-5j7rl" podStartSLOduration=4.306318429 podStartE2EDuration="7.004989126s" podCreationTimestamp="2025-10-01 17:09:45 +0000 UTC" firstStartedPulling="2025-10-01 17:09:47.934787356 +0000 UTC m=+7497.081630462" lastFinishedPulling="2025-10-01 17:09:50.633458043 +0000 UTC m=+7499.780301159" observedRunningTime="2025-10-01 17:09:51.996626625 +0000 UTC 
m=+7501.143469781" watchObservedRunningTime="2025-10-01 17:09:52.004989126 +0000 UTC m=+7501.151832252" Oct 01 17:09:56 crc kubenswrapper[4869]: I1001 17:09:56.245763 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-5j7rl" Oct 01 17:09:56 crc kubenswrapper[4869]: I1001 17:09:56.246490 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-5j7rl" Oct 01 17:09:56 crc kubenswrapper[4869]: I1001 17:09:56.316674 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-5j7rl" Oct 01 17:09:57 crc kubenswrapper[4869]: I1001 17:09:57.096667 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-5j7rl" Oct 01 17:09:57 crc kubenswrapper[4869]: I1001 17:09:57.161615 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-5j7rl"] Oct 01 17:09:59 crc kubenswrapper[4869]: I1001 17:09:59.060164 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-5j7rl" podUID="6ccd2468-311f-47cf-aff3-e79a32910afb" containerName="registry-server" containerID="cri-o://8d95af306b4939178548aac266da1e7d4a93e87d640d290c412114ab81aa7a06" gracePeriod=2 Oct 01 17:10:00 crc kubenswrapper[4869]: I1001 17:10:00.073391 4869 generic.go:334] "Generic (PLEG): container finished" podID="6ccd2468-311f-47cf-aff3-e79a32910afb" containerID="8d95af306b4939178548aac266da1e7d4a93e87d640d290c412114ab81aa7a06" exitCode=0 Oct 01 17:10:00 crc kubenswrapper[4869]: I1001 17:10:00.073632 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5j7rl" event={"ID":"6ccd2468-311f-47cf-aff3-e79a32910afb","Type":"ContainerDied","Data":"8d95af306b4939178548aac266da1e7d4a93e87d640d290c412114ab81aa7a06"} Oct 01 17:10:00 crc kubenswrapper[4869]: I1001 17:10:00.164963 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-5j7rl" Oct 01 17:10:00 crc kubenswrapper[4869]: I1001 17:10:00.275696 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f7jbp\" (UniqueName: \"kubernetes.io/projected/6ccd2468-311f-47cf-aff3-e79a32910afb-kube-api-access-f7jbp\") pod \"6ccd2468-311f-47cf-aff3-e79a32910afb\" (UID: \"6ccd2468-311f-47cf-aff3-e79a32910afb\") " Oct 01 17:10:00 crc kubenswrapper[4869]: I1001 17:10:00.275819 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6ccd2468-311f-47cf-aff3-e79a32910afb-utilities\") pod \"6ccd2468-311f-47cf-aff3-e79a32910afb\" (UID: \"6ccd2468-311f-47cf-aff3-e79a32910afb\") " Oct 01 17:10:00 crc kubenswrapper[4869]: I1001 17:10:00.275918 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6ccd2468-311f-47cf-aff3-e79a32910afb-catalog-content\") pod \"6ccd2468-311f-47cf-aff3-e79a32910afb\" (UID: \"6ccd2468-311f-47cf-aff3-e79a32910afb\") " Oct 01 17:10:00 crc kubenswrapper[4869]: I1001 17:10:00.277813 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6ccd2468-311f-47cf-aff3-e79a32910afb-utilities" (OuterVolumeSpecName: "utilities") pod "6ccd2468-311f-47cf-aff3-e79a32910afb" (UID: "6ccd2468-311f-47cf-aff3-e79a32910afb"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 17:10:00 crc kubenswrapper[4869]: I1001 17:10:00.284953 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ccd2468-311f-47cf-aff3-e79a32910afb-kube-api-access-f7jbp" (OuterVolumeSpecName: "kube-api-access-f7jbp") pod "6ccd2468-311f-47cf-aff3-e79a32910afb" (UID: "6ccd2468-311f-47cf-aff3-e79a32910afb"). InnerVolumeSpecName "kube-api-access-f7jbp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 17:10:00 crc kubenswrapper[4869]: I1001 17:10:00.378979 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f7jbp\" (UniqueName: \"kubernetes.io/projected/6ccd2468-311f-47cf-aff3-e79a32910afb-kube-api-access-f7jbp\") on node \"crc\" DevicePath \"\"" Oct 01 17:10:00 crc kubenswrapper[4869]: I1001 17:10:00.379376 4869 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6ccd2468-311f-47cf-aff3-e79a32910afb-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 17:10:01 crc kubenswrapper[4869]: I1001 17:10:01.099177 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5j7rl" event={"ID":"6ccd2468-311f-47cf-aff3-e79a32910afb","Type":"ContainerDied","Data":"89070989eb2518e0ea4a719356c4cde0f6570a03b8175682b12c3d2014695772"} Oct 01 17:10:01 crc kubenswrapper[4869]: I1001 17:10:01.099243 4869 scope.go:117] "RemoveContainer" containerID="8d95af306b4939178548aac266da1e7d4a93e87d640d290c412114ab81aa7a06" Oct 01 17:10:01 crc kubenswrapper[4869]: I1001 17:10:01.099402 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-5j7rl" Oct 01 17:10:01 crc kubenswrapper[4869]: I1001 17:10:01.125042 4869 scope.go:117] "RemoveContainer" containerID="1e8e5209a1a9a8910c1a2efeb466fea33997c10d5cc17e6afff224a130d92c59" Oct 01 17:10:01 crc kubenswrapper[4869]: I1001 17:10:01.143843 4869 scope.go:117] "RemoveContainer" containerID="5f00e28e1e3051fc5c540e2d2f6d96aed32f7e69a06e0a0d4084c0cdc77012a9" Oct 01 17:10:01 crc kubenswrapper[4869]: I1001 17:10:01.276072 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6ccd2468-311f-47cf-aff3-e79a32910afb-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6ccd2468-311f-47cf-aff3-e79a32910afb" (UID: "6ccd2468-311f-47cf-aff3-e79a32910afb"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 17:10:01 crc kubenswrapper[4869]: I1001 17:10:01.301684 4869 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6ccd2468-311f-47cf-aff3-e79a32910afb-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 17:10:01 crc kubenswrapper[4869]: I1001 17:10:01.459236 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-5j7rl"] Oct 01 17:10:01 crc kubenswrapper[4869]: I1001 17:10:01.469568 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-5j7rl"] Oct 01 17:10:01 crc kubenswrapper[4869]: I1001 17:10:01.594501 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ccd2468-311f-47cf-aff3-e79a32910afb" path="/var/lib/kubelet/pods/6ccd2468-311f-47cf-aff3-e79a32910afb/volumes" Oct 01 17:11:33 crc kubenswrapper[4869]: I1001 17:11:33.112876 4869 generic.go:334] "Generic (PLEG): container finished" podID="cc01041e-7f42-4831-bb4d-c663af563735" containerID="721c2b8ab44f130a8dc6776592b62c3ad9e2b52b8796151d43e2d1e8ac3c5dde" exitCode=0 Oct 01 17:11:33 crc kubenswrapper[4869]: I1001 17:11:33.112981 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tobiko-tests-tobiko-s01-sanity" event={"ID":"cc01041e-7f42-4831-bb4d-c663af563735","Type":"ContainerDied","Data":"721c2b8ab44f130a8dc6776592b62c3ad9e2b52b8796151d43e2d1e8ac3c5dde"} Oct 01 17:11:34 crc kubenswrapper[4869]: I1001 17:11:34.617906 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/tobiko-tests-tobiko-s01-sanity" Oct 01 17:11:34 crc kubenswrapper[4869]: I1001 17:11:34.763591 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/cc01041e-7f42-4831-bb4d-c663af563735-test-operator-ephemeral-workdir\") pod \"cc01041e-7f42-4831-bb4d-c663af563735\" (UID: \"cc01041e-7f42-4831-bb4d-c663af563735\") " Oct 01 17:11:34 crc kubenswrapper[4869]: I1001 17:11:34.763725 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/cc01041e-7f42-4831-bb4d-c663af563735-test-operator-ephemeral-temporary\") pod \"cc01041e-7f42-4831-bb4d-c663af563735\" (UID: \"cc01041e-7f42-4831-bb4d-c663af563735\") " Oct 01 17:11:34 crc kubenswrapper[4869]: I1001 17:11:34.763780 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-logs\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"cc01041e-7f42-4831-bb4d-c663af563735\" (UID: \"cc01041e-7f42-4831-bb4d-c663af563735\") " Oct 01 17:11:34 crc kubenswrapper[4869]: I1001 17:11:34.763866 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubeconfig\" (UniqueName: \"kubernetes.io/secret/cc01041e-7f42-4831-bb4d-c663af563735-kubeconfig\") pod \"cc01041e-7f42-4831-bb4d-c663af563735\" (UID: \"cc01041e-7f42-4831-bb4d-c663af563735\") " Oct 01 17:11:34 crc kubenswrapper[4869]: I1001 17:11:34.763916 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tobiko-private-key\" (UniqueName: \"kubernetes.io/configmap/cc01041e-7f42-4831-bb4d-c663af563735-tobiko-private-key\") pod \"cc01041e-7f42-4831-bb4d-c663af563735\" (UID: \"cc01041e-7f42-4831-bb4d-c663af563735\") " Oct 01 17:11:34 crc kubenswrapper[4869]: I1001 17:11:34.763958 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/cc01041e-7f42-4831-bb4d-c663af563735-openstack-config-secret\") pod \"cc01041e-7f42-4831-bb4d-c663af563735\" (UID: \"cc01041e-7f42-4831-bb4d-c663af563735\") " Oct 01 17:11:34 crc kubenswrapper[4869]: I1001 17:11:34.764002 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j467t\" (UniqueName: \"kubernetes.io/projected/cc01041e-7f42-4831-bb4d-c663af563735-kube-api-access-j467t\") pod \"cc01041e-7f42-4831-bb4d-c663af563735\" (UID: \"cc01041e-7f42-4831-bb4d-c663af563735\") " Oct 01 17:11:34 crc kubenswrapper[4869]: I1001 17:11:34.764033 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/cc01041e-7f42-4831-bb4d-c663af563735-ceph\") pod \"cc01041e-7f42-4831-bb4d-c663af563735\" (UID: \"cc01041e-7f42-4831-bb4d-c663af563735\") " Oct 01 17:11:34 crc kubenswrapper[4869]: I1001 17:11:34.764137 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/cc01041e-7f42-4831-bb4d-c663af563735-ca-certs\") pod \"cc01041e-7f42-4831-bb4d-c663af563735\" (UID: \"cc01041e-7f42-4831-bb4d-c663af563735\") " Oct 01 17:11:34 crc kubenswrapper[4869]: I1001 17:11:34.766532 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cc01041e-7f42-4831-bb4d-c663af563735-test-operator-ephemeral-temporary" (OuterVolumeSpecName: 
"test-operator-ephemeral-temporary") pod "cc01041e-7f42-4831-bb4d-c663af563735" (UID: "cc01041e-7f42-4831-bb4d-c663af563735"). InnerVolumeSpecName "test-operator-ephemeral-temporary". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 17:11:34 crc kubenswrapper[4869]: I1001 17:11:34.767147 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tobiko-public-key\" (UniqueName: \"kubernetes.io/configmap/cc01041e-7f42-4831-bb4d-c663af563735-tobiko-public-key\") pod \"cc01041e-7f42-4831-bb4d-c663af563735\" (UID: \"cc01041e-7f42-4831-bb4d-c663af563735\") " Oct 01 17:11:34 crc kubenswrapper[4869]: I1001 17:11:34.767204 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-clouds-config\" (UniqueName: \"kubernetes.io/configmap/cc01041e-7f42-4831-bb4d-c663af563735-test-operator-clouds-config\") pod \"cc01041e-7f42-4831-bb4d-c663af563735\" (UID: \"cc01041e-7f42-4831-bb4d-c663af563735\") " Oct 01 17:11:34 crc kubenswrapper[4869]: I1001 17:11:34.767279 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tobiko-config\" (UniqueName: \"kubernetes.io/configmap/cc01041e-7f42-4831-bb4d-c663af563735-tobiko-config\") pod \"cc01041e-7f42-4831-bb4d-c663af563735\" (UID: \"cc01041e-7f42-4831-bb4d-c663af563735\") " Oct 01 17:11:34 crc kubenswrapper[4869]: I1001 17:11:34.768540 4869 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/cc01041e-7f42-4831-bb4d-c663af563735-test-operator-ephemeral-temporary\") on node \"crc\" DevicePath \"\"" Oct 01 17:11:34 crc kubenswrapper[4869]: I1001 17:11:34.772241 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage07-crc" (OuterVolumeSpecName: "test-operator-logs") pod "cc01041e-7f42-4831-bb4d-c663af563735" (UID: "cc01041e-7f42-4831-bb4d-c663af563735"). InnerVolumeSpecName "local-storage07-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 01 17:11:34 crc kubenswrapper[4869]: I1001 17:11:34.772379 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cc01041e-7f42-4831-bb4d-c663af563735-ceph" (OuterVolumeSpecName: "ceph") pod "cc01041e-7f42-4831-bb4d-c663af563735" (UID: "cc01041e-7f42-4831-bb4d-c663af563735"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 17:11:34 crc kubenswrapper[4869]: I1001 17:11:34.772988 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cc01041e-7f42-4831-bb4d-c663af563735-kube-api-access-j467t" (OuterVolumeSpecName: "kube-api-access-j467t") pod "cc01041e-7f42-4831-bb4d-c663af563735" (UID: "cc01041e-7f42-4831-bb4d-c663af563735"). InnerVolumeSpecName "kube-api-access-j467t". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 17:11:34 crc kubenswrapper[4869]: I1001 17:11:34.798582 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cc01041e-7f42-4831-bb4d-c663af563735-tobiko-private-key" (OuterVolumeSpecName: "tobiko-private-key") pod "cc01041e-7f42-4831-bb4d-c663af563735" (UID: "cc01041e-7f42-4831-bb4d-c663af563735"). InnerVolumeSpecName "tobiko-private-key". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 17:11:34 crc kubenswrapper[4869]: I1001 17:11:34.799441 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cc01041e-7f42-4831-bb4d-c663af563735-tobiko-config" (OuterVolumeSpecName: "tobiko-config") pod "cc01041e-7f42-4831-bb4d-c663af563735" (UID: "cc01041e-7f42-4831-bb4d-c663af563735"). InnerVolumeSpecName "tobiko-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 17:11:34 crc kubenswrapper[4869]: I1001 17:11:34.807357 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cc01041e-7f42-4831-bb4d-c663af563735-tobiko-public-key" (OuterVolumeSpecName: "tobiko-public-key") pod "cc01041e-7f42-4831-bb4d-c663af563735" (UID: "cc01041e-7f42-4831-bb4d-c663af563735"). InnerVolumeSpecName "tobiko-public-key". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 17:11:34 crc kubenswrapper[4869]: I1001 17:11:34.813328 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cc01041e-7f42-4831-bb4d-c663af563735-kubeconfig" (OuterVolumeSpecName: "kubeconfig") pod "cc01041e-7f42-4831-bb4d-c663af563735" (UID: "cc01041e-7f42-4831-bb4d-c663af563735"). InnerVolumeSpecName "kubeconfig". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 17:11:34 crc kubenswrapper[4869]: I1001 17:11:34.823623 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cc01041e-7f42-4831-bb4d-c663af563735-ca-certs" (OuterVolumeSpecName: "ca-certs") pod "cc01041e-7f42-4831-bb4d-c663af563735" (UID: "cc01041e-7f42-4831-bb4d-c663af563735"). InnerVolumeSpecName "ca-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 17:11:34 crc kubenswrapper[4869]: I1001 17:11:34.830051 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cc01041e-7f42-4831-bb4d-c663af563735-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "cc01041e-7f42-4831-bb4d-c663af563735" (UID: "cc01041e-7f42-4831-bb4d-c663af563735"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 17:11:34 crc kubenswrapper[4869]: I1001 17:11:34.844649 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cc01041e-7f42-4831-bb4d-c663af563735-test-operator-clouds-config" (OuterVolumeSpecName: "test-operator-clouds-config") pod "cc01041e-7f42-4831-bb4d-c663af563735" (UID: "cc01041e-7f42-4831-bb4d-c663af563735"). InnerVolumeSpecName "test-operator-clouds-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 17:11:34 crc kubenswrapper[4869]: I1001 17:11:34.871173 4869 reconciler_common.go:293] "Volume detached for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/cc01041e-7f42-4831-bb4d-c663af563735-ca-certs\") on node \"crc\" DevicePath \"\"" Oct 01 17:11:34 crc kubenswrapper[4869]: I1001 17:11:34.871232 4869 reconciler_common.go:293] "Volume detached for volume \"tobiko-public-key\" (UniqueName: \"kubernetes.io/configmap/cc01041e-7f42-4831-bb4d-c663af563735-tobiko-public-key\") on node \"crc\" DevicePath \"\"" Oct 01 17:11:34 crc kubenswrapper[4869]: I1001 17:11:34.871246 4869 reconciler_common.go:293] "Volume detached for volume \"test-operator-clouds-config\" (UniqueName: \"kubernetes.io/configmap/cc01041e-7f42-4831-bb4d-c663af563735-test-operator-clouds-config\") on node \"crc\" DevicePath \"\"" Oct 01 17:11:34 crc kubenswrapper[4869]: I1001 17:11:34.871287 4869 reconciler_common.go:293] "Volume detached for volume \"tobiko-config\" (UniqueName: \"kubernetes.io/configmap/cc01041e-7f42-4831-bb4d-c663af563735-tobiko-config\") on node \"crc\" DevicePath \"\"" Oct 01 17:11:34 crc kubenswrapper[4869]: I1001 17:11:34.871328 4869 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" " Oct 01 17:11:34 crc kubenswrapper[4869]: I1001 17:11:34.871364 4869 reconciler_common.go:293] "Volume detached for volume \"kubeconfig\" (UniqueName: \"kubernetes.io/secret/cc01041e-7f42-4831-bb4d-c663af563735-kubeconfig\") on node \"crc\" DevicePath \"\"" Oct 01 17:11:34 crc kubenswrapper[4869]: I1001 17:11:34.871381 4869 reconciler_common.go:293] "Volume detached for volume \"tobiko-private-key\" (UniqueName: \"kubernetes.io/configmap/cc01041e-7f42-4831-bb4d-c663af563735-tobiko-private-key\") on node \"crc\" DevicePath \"\"" Oct 01 17:11:34 crc kubenswrapper[4869]: I1001 17:11:34.871395 4869 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/cc01041e-7f42-4831-bb4d-c663af563735-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Oct 01 17:11:34 crc kubenswrapper[4869]: I1001 17:11:34.871407 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j467t\" (UniqueName: \"kubernetes.io/projected/cc01041e-7f42-4831-bb4d-c663af563735-kube-api-access-j467t\") on node \"crc\" DevicePath \"\"" Oct 01 17:11:34 crc kubenswrapper[4869]: I1001 17:11:34.871418 4869 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/cc01041e-7f42-4831-bb4d-c663af563735-ceph\") on node \"crc\" DevicePath \"\"" Oct 01 17:11:34 crc kubenswrapper[4869]: I1001 17:11:34.909370 4869 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage07-crc" (UniqueName: "kubernetes.io/local-volume/local-storage07-crc") on node "crc" Oct 01 17:11:34 crc kubenswrapper[4869]: I1001 17:11:34.974053 4869 reconciler_common.go:293] "Volume detached for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" DevicePath \"\"" Oct 01 17:11:35 crc kubenswrapper[4869]: I1001 17:11:35.137822 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tobiko-tests-tobiko-s01-sanity" event={"ID":"cc01041e-7f42-4831-bb4d-c663af563735","Type":"ContainerDied","Data":"c36388d37f80716343ec629f06d3994dc564a0c1be87433e45c0b2000c2b001a"} Oct 01 17:11:35 crc 
kubenswrapper[4869]: I1001 17:11:35.137863 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c36388d37f80716343ec629f06d3994dc564a0c1be87433e45c0b2000c2b001a" Oct 01 17:11:35 crc kubenswrapper[4869]: I1001 17:11:35.137914 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/tobiko-tests-tobiko-s01-sanity" Oct 01 17:11:36 crc kubenswrapper[4869]: I1001 17:11:36.521960 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cc01041e-7f42-4831-bb4d-c663af563735-test-operator-ephemeral-workdir" (OuterVolumeSpecName: "test-operator-ephemeral-workdir") pod "cc01041e-7f42-4831-bb4d-c663af563735" (UID: "cc01041e-7f42-4831-bb4d-c663af563735"). InnerVolumeSpecName "test-operator-ephemeral-workdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 17:11:36 crc kubenswrapper[4869]: I1001 17:11:36.611151 4869 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/cc01041e-7f42-4831-bb4d-c663af563735-test-operator-ephemeral-workdir\") on node \"crc\" DevicePath \"\"" Oct 01 17:11:37 crc kubenswrapper[4869]: I1001 17:11:37.102086 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/test-operator-logs-pod-tobiko-tobiko-tests-tobiko"] Oct 01 17:11:37 crc kubenswrapper[4869]: E1001 17:11:37.102714 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cc01041e-7f42-4831-bb4d-c663af563735" containerName="tobiko-tests-tobiko" Oct 01 17:11:37 crc kubenswrapper[4869]: I1001 17:11:37.102730 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="cc01041e-7f42-4831-bb4d-c663af563735" containerName="tobiko-tests-tobiko" Oct 01 17:11:37 crc kubenswrapper[4869]: E1001 17:11:37.102742 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6ccd2468-311f-47cf-aff3-e79a32910afb" containerName="extract-utilities" Oct 01 17:11:37 crc kubenswrapper[4869]: I1001 17:11:37.102749 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="6ccd2468-311f-47cf-aff3-e79a32910afb" containerName="extract-utilities" Oct 01 17:11:37 crc kubenswrapper[4869]: E1001 17:11:37.102755 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6ccd2468-311f-47cf-aff3-e79a32910afb" containerName="extract-content" Oct 01 17:11:37 crc kubenswrapper[4869]: I1001 17:11:37.102761 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="6ccd2468-311f-47cf-aff3-e79a32910afb" containerName="extract-content" Oct 01 17:11:37 crc kubenswrapper[4869]: E1001 17:11:37.102779 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6ccd2468-311f-47cf-aff3-e79a32910afb" containerName="registry-server" Oct 01 17:11:37 crc kubenswrapper[4869]: I1001 17:11:37.102784 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="6ccd2468-311f-47cf-aff3-e79a32910afb" containerName="registry-server" Oct 01 17:11:37 crc kubenswrapper[4869]: I1001 17:11:37.102962 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="cc01041e-7f42-4831-bb4d-c663af563735" containerName="tobiko-tests-tobiko" Oct 01 17:11:37 crc kubenswrapper[4869]: I1001 17:11:37.102972 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="6ccd2468-311f-47cf-aff3-e79a32910afb" containerName="registry-server" Oct 01 17:11:37 crc kubenswrapper[4869]: I1001 17:11:37.103588 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/test-operator-logs-pod-tobiko-tobiko-tests-tobiko" Oct 01 17:11:37 crc kubenswrapper[4869]: I1001 17:11:37.122918 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tobiko-tobiko-tests-tobiko"] Oct 01 17:11:37 crc kubenswrapper[4869]: I1001 17:11:37.223223 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7lz9p\" (UniqueName: \"kubernetes.io/projected/fab05180-0ae2-40c0-afec-c925124a7d35-kube-api-access-7lz9p\") pod \"test-operator-logs-pod-tobiko-tobiko-tests-tobiko\" (UID: \"fab05180-0ae2-40c0-afec-c925124a7d35\") " pod="openstack/test-operator-logs-pod-tobiko-tobiko-tests-tobiko" Oct 01 17:11:37 crc kubenswrapper[4869]: I1001 17:11:37.223307 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"test-operator-logs-pod-tobiko-tobiko-tests-tobiko\" (UID: \"fab05180-0ae2-40c0-afec-c925124a7d35\") " pod="openstack/test-operator-logs-pod-tobiko-tobiko-tests-tobiko" Oct 01 17:11:37 crc kubenswrapper[4869]: I1001 17:11:37.325519 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7lz9p\" (UniqueName: \"kubernetes.io/projected/fab05180-0ae2-40c0-afec-c925124a7d35-kube-api-access-7lz9p\") pod \"test-operator-logs-pod-tobiko-tobiko-tests-tobiko\" (UID: \"fab05180-0ae2-40c0-afec-c925124a7d35\") " pod="openstack/test-operator-logs-pod-tobiko-tobiko-tests-tobiko" Oct 01 17:11:37 crc kubenswrapper[4869]: I1001 17:11:37.325914 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"test-operator-logs-pod-tobiko-tobiko-tests-tobiko\" (UID: \"fab05180-0ae2-40c0-afec-c925124a7d35\") " pod="openstack/test-operator-logs-pod-tobiko-tobiko-tests-tobiko" Oct 01 17:11:37 crc kubenswrapper[4869]: I1001 17:11:37.326320 4869 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"test-operator-logs-pod-tobiko-tobiko-tests-tobiko\" (UID: \"fab05180-0ae2-40c0-afec-c925124a7d35\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/test-operator-logs-pod-tobiko-tobiko-tests-tobiko" Oct 01 17:11:37 crc kubenswrapper[4869]: I1001 17:11:37.344870 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7lz9p\" (UniqueName: \"kubernetes.io/projected/fab05180-0ae2-40c0-afec-c925124a7d35-kube-api-access-7lz9p\") pod \"test-operator-logs-pod-tobiko-tobiko-tests-tobiko\" (UID: \"fab05180-0ae2-40c0-afec-c925124a7d35\") " pod="openstack/test-operator-logs-pod-tobiko-tobiko-tests-tobiko" Oct 01 17:11:37 crc kubenswrapper[4869]: I1001 17:11:37.361877 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"test-operator-logs-pod-tobiko-tobiko-tests-tobiko\" (UID: \"fab05180-0ae2-40c0-afec-c925124a7d35\") " pod="openstack/test-operator-logs-pod-tobiko-tobiko-tests-tobiko" Oct 01 17:11:37 crc kubenswrapper[4869]: I1001 17:11:37.424751 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/test-operator-logs-pod-tobiko-tobiko-tests-tobiko" Oct 01 17:11:37 crc kubenswrapper[4869]: I1001 17:11:37.913680 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tobiko-tobiko-tests-tobiko"] Oct 01 17:11:38 crc kubenswrapper[4869]: I1001 17:11:38.186886 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tobiko-tobiko-tests-tobiko" event={"ID":"fab05180-0ae2-40c0-afec-c925124a7d35","Type":"ContainerStarted","Data":"c6a8ebe743d6424a0b58b2ec745bf022a3ff32c7e2b45f5c6b8f05d771078852"} Oct 01 17:11:39 crc kubenswrapper[4869]: I1001 17:11:39.202021 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tobiko-tobiko-tests-tobiko" event={"ID":"fab05180-0ae2-40c0-afec-c925124a7d35","Type":"ContainerStarted","Data":"1e1c820e9e7c3cd36eb6a22716f6325324ebf7b8ab457cb418e8d57cd3da1d8c"} Oct 01 17:11:39 crc kubenswrapper[4869]: I1001 17:11:39.228290 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/test-operator-logs-pod-tobiko-tobiko-tests-tobiko" podStartSLOduration=1.5958799369999999 podStartE2EDuration="2.228227975s" podCreationTimestamp="2025-10-01 17:11:37 +0000 UTC" firstStartedPulling="2025-10-01 17:11:37.931580993 +0000 UTC m=+7607.078424109" lastFinishedPulling="2025-10-01 17:11:38.563929021 +0000 UTC m=+7607.710772147" observedRunningTime="2025-10-01 17:11:39.21735449 +0000 UTC m=+7608.364197666" watchObservedRunningTime="2025-10-01 17:11:39.228227975 +0000 UTC m=+7608.375071121" Oct 01 17:11:43 crc kubenswrapper[4869]: I1001 17:11:43.354167 4869 patch_prober.go:28] interesting pod/machine-config-daemon-c86m8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 17:11:43 crc kubenswrapper[4869]: I1001 17:11:43.354724 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 17:11:56 crc kubenswrapper[4869]: I1001 17:11:56.861887 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ansibletest-ansibletest"] Oct 01 17:11:56 crc kubenswrapper[4869]: I1001 17:11:56.865101 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ansibletest-ansibletest" Oct 01 17:11:56 crc kubenswrapper[4869]: I1001 17:11:56.874314 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ansibletest-ansibletest"] Oct 01 17:11:56 crc kubenswrapper[4869]: I1001 17:11:56.906212 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 01 17:11:56 crc kubenswrapper[4869]: I1001 17:11:56.911161 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"test-operator-controller-priv-key" Oct 01 17:11:56 crc kubenswrapper[4869]: I1001 17:11:56.962251 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"ansibletest-ansibletest\" (UID: \"5725efb6-5a19-4979-966f-2e6ee4e16109\") " pod="openstack/ansibletest-ansibletest" Oct 01 17:11:56 crc kubenswrapper[4869]: I1001 17:11:56.962306 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"compute-ssh-secret\" (UniqueName: \"kubernetes.io/secret/5725efb6-5a19-4979-966f-2e6ee4e16109-compute-ssh-secret\") pod \"ansibletest-ansibletest\" (UID: \"5725efb6-5a19-4979-966f-2e6ee4e16109\") " pod="openstack/ansibletest-ansibletest" Oct 01 17:11:56 crc kubenswrapper[4869]: I1001 17:11:56.962333 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/5725efb6-5a19-4979-966f-2e6ee4e16109-test-operator-ephemeral-workdir\") pod \"ansibletest-ansibletest\" (UID: \"5725efb6-5a19-4979-966f-2e6ee4e16109\") " pod="openstack/ansibletest-ansibletest" Oct 01 17:11:56 crc kubenswrapper[4869]: I1001 17:11:56.962376 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/5725efb6-5a19-4979-966f-2e6ee4e16109-test-operator-ephemeral-temporary\") pod \"ansibletest-ansibletest\" (UID: \"5725efb6-5a19-4979-966f-2e6ee4e16109\") " pod="openstack/ansibletest-ansibletest" Oct 01 17:11:56 crc kubenswrapper[4869]: I1001 17:11:56.962396 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/5725efb6-5a19-4979-966f-2e6ee4e16109-openstack-config\") pod \"ansibletest-ansibletest\" (UID: \"5725efb6-5a19-4979-966f-2e6ee4e16109\") " pod="openstack/ansibletest-ansibletest" Oct 01 17:11:56 crc kubenswrapper[4869]: I1001 17:11:56.963021 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/5725efb6-5a19-4979-966f-2e6ee4e16109-ca-certs\") pod \"ansibletest-ansibletest\" (UID: \"5725efb6-5a19-4979-966f-2e6ee4e16109\") " pod="openstack/ansibletest-ansibletest" Oct 01 17:11:56 crc kubenswrapper[4869]: I1001 17:11:56.963233 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"workload-ssh-secret\" (UniqueName: \"kubernetes.io/secret/5725efb6-5a19-4979-966f-2e6ee4e16109-workload-ssh-secret\") pod \"ansibletest-ansibletest\" (UID: \"5725efb6-5a19-4979-966f-2e6ee4e16109\") " pod="openstack/ansibletest-ansibletest" Oct 01 17:11:56 crc kubenswrapper[4869]: I1001 17:11:56.963437 4869 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r4gwz\" (UniqueName: \"kubernetes.io/projected/5725efb6-5a19-4979-966f-2e6ee4e16109-kube-api-access-r4gwz\") pod \"ansibletest-ansibletest\" (UID: \"5725efb6-5a19-4979-966f-2e6ee4e16109\") " pod="openstack/ansibletest-ansibletest" Oct 01 17:11:56 crc kubenswrapper[4869]: I1001 17:11:56.964004 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/5725efb6-5a19-4979-966f-2e6ee4e16109-ceph\") pod \"ansibletest-ansibletest\" (UID: \"5725efb6-5a19-4979-966f-2e6ee4e16109\") " pod="openstack/ansibletest-ansibletest" Oct 01 17:11:56 crc kubenswrapper[4869]: I1001 17:11:56.964442 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/5725efb6-5a19-4979-966f-2e6ee4e16109-openstack-config-secret\") pod \"ansibletest-ansibletest\" (UID: \"5725efb6-5a19-4979-966f-2e6ee4e16109\") " pod="openstack/ansibletest-ansibletest" Oct 01 17:11:57 crc kubenswrapper[4869]: I1001 17:11:57.066508 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/5725efb6-5a19-4979-966f-2e6ee4e16109-test-operator-ephemeral-temporary\") pod \"ansibletest-ansibletest\" (UID: \"5725efb6-5a19-4979-966f-2e6ee4e16109\") " pod="openstack/ansibletest-ansibletest" Oct 01 17:11:57 crc kubenswrapper[4869]: I1001 17:11:57.066581 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/5725efb6-5a19-4979-966f-2e6ee4e16109-openstack-config\") pod \"ansibletest-ansibletest\" (UID: \"5725efb6-5a19-4979-966f-2e6ee4e16109\") " pod="openstack/ansibletest-ansibletest" Oct 01 17:11:57 crc kubenswrapper[4869]: I1001 17:11:57.066663 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/5725efb6-5a19-4979-966f-2e6ee4e16109-ca-certs\") pod \"ansibletest-ansibletest\" (UID: \"5725efb6-5a19-4979-966f-2e6ee4e16109\") " pod="openstack/ansibletest-ansibletest" Oct 01 17:11:57 crc kubenswrapper[4869]: I1001 17:11:57.066712 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"workload-ssh-secret\" (UniqueName: \"kubernetes.io/secret/5725efb6-5a19-4979-966f-2e6ee4e16109-workload-ssh-secret\") pod \"ansibletest-ansibletest\" (UID: \"5725efb6-5a19-4979-966f-2e6ee4e16109\") " pod="openstack/ansibletest-ansibletest" Oct 01 17:11:57 crc kubenswrapper[4869]: I1001 17:11:57.066746 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r4gwz\" (UniqueName: \"kubernetes.io/projected/5725efb6-5a19-4979-966f-2e6ee4e16109-kube-api-access-r4gwz\") pod \"ansibletest-ansibletest\" (UID: \"5725efb6-5a19-4979-966f-2e6ee4e16109\") " pod="openstack/ansibletest-ansibletest" Oct 01 17:11:57 crc kubenswrapper[4869]: I1001 17:11:57.066843 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/5725efb6-5a19-4979-966f-2e6ee4e16109-ceph\") pod \"ansibletest-ansibletest\" (UID: \"5725efb6-5a19-4979-966f-2e6ee4e16109\") " pod="openstack/ansibletest-ansibletest" Oct 01 17:11:57 crc kubenswrapper[4869]: I1001 17:11:57.066901 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/5725efb6-5a19-4979-966f-2e6ee4e16109-openstack-config-secret\") pod \"ansibletest-ansibletest\" (UID: \"5725efb6-5a19-4979-966f-2e6ee4e16109\") " pod="openstack/ansibletest-ansibletest" Oct 01 17:11:57 crc kubenswrapper[4869]: I1001 17:11:57.066995 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"ansibletest-ansibletest\" (UID: \"5725efb6-5a19-4979-966f-2e6ee4e16109\") " pod="openstack/ansibletest-ansibletest" Oct 01 17:11:57 crc kubenswrapper[4869]: I1001 17:11:57.067026 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"compute-ssh-secret\" (UniqueName: \"kubernetes.io/secret/5725efb6-5a19-4979-966f-2e6ee4e16109-compute-ssh-secret\") pod \"ansibletest-ansibletest\" (UID: \"5725efb6-5a19-4979-966f-2e6ee4e16109\") " pod="openstack/ansibletest-ansibletest" Oct 01 17:11:57 crc kubenswrapper[4869]: I1001 17:11:57.067067 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/5725efb6-5a19-4979-966f-2e6ee4e16109-test-operator-ephemeral-workdir\") pod \"ansibletest-ansibletest\" (UID: \"5725efb6-5a19-4979-966f-2e6ee4e16109\") " pod="openstack/ansibletest-ansibletest" Oct 01 17:11:57 crc kubenswrapper[4869]: I1001 17:11:57.067735 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/5725efb6-5a19-4979-966f-2e6ee4e16109-test-operator-ephemeral-workdir\") pod \"ansibletest-ansibletest\" (UID: \"5725efb6-5a19-4979-966f-2e6ee4e16109\") " pod="openstack/ansibletest-ansibletest" Oct 01 17:11:57 crc kubenswrapper[4869]: I1001 17:11:57.069003 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/5725efb6-5a19-4979-966f-2e6ee4e16109-openstack-config\") pod \"ansibletest-ansibletest\" (UID: \"5725efb6-5a19-4979-966f-2e6ee4e16109\") " pod="openstack/ansibletest-ansibletest" Oct 01 17:11:57 crc kubenswrapper[4869]: I1001 17:11:57.069307 4869 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"ansibletest-ansibletest\" (UID: \"5725efb6-5a19-4979-966f-2e6ee4e16109\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/ansibletest-ansibletest" Oct 01 17:11:57 crc kubenswrapper[4869]: I1001 17:11:57.069334 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/5725efb6-5a19-4979-966f-2e6ee4e16109-test-operator-ephemeral-temporary\") pod \"ansibletest-ansibletest\" (UID: \"5725efb6-5a19-4979-966f-2e6ee4e16109\") " pod="openstack/ansibletest-ansibletest" Oct 01 17:11:57 crc kubenswrapper[4869]: I1001 17:11:57.075909 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"workload-ssh-secret\" (UniqueName: \"kubernetes.io/secret/5725efb6-5a19-4979-966f-2e6ee4e16109-workload-ssh-secret\") pod \"ansibletest-ansibletest\" (UID: \"5725efb6-5a19-4979-966f-2e6ee4e16109\") " pod="openstack/ansibletest-ansibletest" Oct 01 17:11:57 crc kubenswrapper[4869]: I1001 17:11:57.076110 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"compute-ssh-secret\" (UniqueName: 
\"kubernetes.io/secret/5725efb6-5a19-4979-966f-2e6ee4e16109-compute-ssh-secret\") pod \"ansibletest-ansibletest\" (UID: \"5725efb6-5a19-4979-966f-2e6ee4e16109\") " pod="openstack/ansibletest-ansibletest" Oct 01 17:11:57 crc kubenswrapper[4869]: I1001 17:11:57.076121 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/5725efb6-5a19-4979-966f-2e6ee4e16109-openstack-config-secret\") pod \"ansibletest-ansibletest\" (UID: \"5725efb6-5a19-4979-966f-2e6ee4e16109\") " pod="openstack/ansibletest-ansibletest" Oct 01 17:11:57 crc kubenswrapper[4869]: I1001 17:11:57.076949 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/5725efb6-5a19-4979-966f-2e6ee4e16109-ca-certs\") pod \"ansibletest-ansibletest\" (UID: \"5725efb6-5a19-4979-966f-2e6ee4e16109\") " pod="openstack/ansibletest-ansibletest" Oct 01 17:11:57 crc kubenswrapper[4869]: I1001 17:11:57.079983 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/5725efb6-5a19-4979-966f-2e6ee4e16109-ceph\") pod \"ansibletest-ansibletest\" (UID: \"5725efb6-5a19-4979-966f-2e6ee4e16109\") " pod="openstack/ansibletest-ansibletest" Oct 01 17:11:57 crc kubenswrapper[4869]: I1001 17:11:57.092175 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r4gwz\" (UniqueName: \"kubernetes.io/projected/5725efb6-5a19-4979-966f-2e6ee4e16109-kube-api-access-r4gwz\") pod \"ansibletest-ansibletest\" (UID: \"5725efb6-5a19-4979-966f-2e6ee4e16109\") " pod="openstack/ansibletest-ansibletest" Oct 01 17:11:57 crc kubenswrapper[4869]: I1001 17:11:57.098892 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"ansibletest-ansibletest\" (UID: \"5725efb6-5a19-4979-966f-2e6ee4e16109\") " pod="openstack/ansibletest-ansibletest" Oct 01 17:11:57 crc kubenswrapper[4869]: I1001 17:11:57.231430 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ansibletest-ansibletest" Oct 01 17:11:57 crc kubenswrapper[4869]: I1001 17:11:57.803221 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ansibletest-ansibletest"] Oct 01 17:11:57 crc kubenswrapper[4869]: W1001 17:11:57.805602 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5725efb6_5a19_4979_966f_2e6ee4e16109.slice/crio-8dd25d7c74eec96f97abc36b1fcf579b2fabf3998c5ce932ab085307ba3a58d3 WatchSource:0}: Error finding container 8dd25d7c74eec96f97abc36b1fcf579b2fabf3998c5ce932ab085307ba3a58d3: Status 404 returned error can't find the container with id 8dd25d7c74eec96f97abc36b1fcf579b2fabf3998c5ce932ab085307ba3a58d3 Oct 01 17:11:57 crc kubenswrapper[4869]: I1001 17:11:57.808588 4869 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 01 17:11:58 crc kubenswrapper[4869]: I1001 17:11:58.440143 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ansibletest-ansibletest" event={"ID":"5725efb6-5a19-4979-966f-2e6ee4e16109","Type":"ContainerStarted","Data":"8dd25d7c74eec96f97abc36b1fcf579b2fabf3998c5ce932ab085307ba3a58d3"} Oct 01 17:12:12 crc kubenswrapper[4869]: I1001 17:12:12.577421 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ansibletest-ansibletest" event={"ID":"5725efb6-5a19-4979-966f-2e6ee4e16109","Type":"ContainerStarted","Data":"0dcfc51eb10a1ecf1ebf32b634dd7152025079cd0b8d6cf1bdeb5c37891b904b"} Oct 01 17:12:12 crc kubenswrapper[4869]: I1001 17:12:12.603300 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ansibletest-ansibletest" podStartSLOduration=3.9389623179999997 podStartE2EDuration="17.603281748s" podCreationTimestamp="2025-10-01 17:11:55 +0000 UTC" firstStartedPulling="2025-10-01 17:11:57.808341027 +0000 UTC m=+7626.955184153" lastFinishedPulling="2025-10-01 17:12:11.472660367 +0000 UTC m=+7640.619503583" observedRunningTime="2025-10-01 17:12:12.593345087 +0000 UTC m=+7641.740188223" watchObservedRunningTime="2025-10-01 17:12:12.603281748 +0000 UTC m=+7641.750124864" Oct 01 17:12:13 crc kubenswrapper[4869]: I1001 17:12:13.354518 4869 patch_prober.go:28] interesting pod/machine-config-daemon-c86m8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 17:12:13 crc kubenswrapper[4869]: I1001 17:12:13.354917 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 17:12:14 crc kubenswrapper[4869]: I1001 17:12:14.600363 4869 generic.go:334] "Generic (PLEG): container finished" podID="5725efb6-5a19-4979-966f-2e6ee4e16109" containerID="0dcfc51eb10a1ecf1ebf32b634dd7152025079cd0b8d6cf1bdeb5c37891b904b" exitCode=0 Oct 01 17:12:14 crc kubenswrapper[4869]: I1001 17:12:14.600419 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ansibletest-ansibletest" event={"ID":"5725efb6-5a19-4979-966f-2e6ee4e16109","Type":"ContainerDied","Data":"0dcfc51eb10a1ecf1ebf32b634dd7152025079cd0b8d6cf1bdeb5c37891b904b"} Oct 01 17:12:16 crc kubenswrapper[4869]: I1001 17:12:16.009014 4869 
util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ansibletest-ansibletest" Oct 01 17:12:16 crc kubenswrapper[4869]: I1001 17:12:16.089661 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"workload-ssh-secret\" (UniqueName: \"kubernetes.io/secret/5725efb6-5a19-4979-966f-2e6ee4e16109-workload-ssh-secret\") pod \"5725efb6-5a19-4979-966f-2e6ee4e16109\" (UID: \"5725efb6-5a19-4979-966f-2e6ee4e16109\") " Oct 01 17:12:16 crc kubenswrapper[4869]: I1001 17:12:16.089786 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-logs\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"5725efb6-5a19-4979-966f-2e6ee4e16109\" (UID: \"5725efb6-5a19-4979-966f-2e6ee4e16109\") " Oct 01 17:12:16 crc kubenswrapper[4869]: I1001 17:12:16.089850 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r4gwz\" (UniqueName: \"kubernetes.io/projected/5725efb6-5a19-4979-966f-2e6ee4e16109-kube-api-access-r4gwz\") pod \"5725efb6-5a19-4979-966f-2e6ee4e16109\" (UID: \"5725efb6-5a19-4979-966f-2e6ee4e16109\") " Oct 01 17:12:16 crc kubenswrapper[4869]: I1001 17:12:16.089878 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/5725efb6-5a19-4979-966f-2e6ee4e16109-test-operator-ephemeral-workdir\") pod \"5725efb6-5a19-4979-966f-2e6ee4e16109\" (UID: \"5725efb6-5a19-4979-966f-2e6ee4e16109\") " Oct 01 17:12:16 crc kubenswrapper[4869]: I1001 17:12:16.089920 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/5725efb6-5a19-4979-966f-2e6ee4e16109-openstack-config-secret\") pod \"5725efb6-5a19-4979-966f-2e6ee4e16109\" (UID: \"5725efb6-5a19-4979-966f-2e6ee4e16109\") " Oct 01 17:12:16 crc kubenswrapper[4869]: I1001 17:12:16.089947 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/5725efb6-5a19-4979-966f-2e6ee4e16109-ca-certs\") pod \"5725efb6-5a19-4979-966f-2e6ee4e16109\" (UID: \"5725efb6-5a19-4979-966f-2e6ee4e16109\") " Oct 01 17:12:16 crc kubenswrapper[4869]: I1001 17:12:16.089977 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/5725efb6-5a19-4979-966f-2e6ee4e16109-openstack-config\") pod \"5725efb6-5a19-4979-966f-2e6ee4e16109\" (UID: \"5725efb6-5a19-4979-966f-2e6ee4e16109\") " Oct 01 17:12:16 crc kubenswrapper[4869]: I1001 17:12:16.090035 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/5725efb6-5a19-4979-966f-2e6ee4e16109-ceph\") pod \"5725efb6-5a19-4979-966f-2e6ee4e16109\" (UID: \"5725efb6-5a19-4979-966f-2e6ee4e16109\") " Oct 01 17:12:16 crc kubenswrapper[4869]: I1001 17:12:16.090087 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"compute-ssh-secret\" (UniqueName: \"kubernetes.io/secret/5725efb6-5a19-4979-966f-2e6ee4e16109-compute-ssh-secret\") pod \"5725efb6-5a19-4979-966f-2e6ee4e16109\" (UID: \"5725efb6-5a19-4979-966f-2e6ee4e16109\") " Oct 01 17:12:16 crc kubenswrapper[4869]: I1001 17:12:16.090170 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: 
\"kubernetes.io/empty-dir/5725efb6-5a19-4979-966f-2e6ee4e16109-test-operator-ephemeral-temporary\") pod \"5725efb6-5a19-4979-966f-2e6ee4e16109\" (UID: \"5725efb6-5a19-4979-966f-2e6ee4e16109\") " Oct 01 17:12:16 crc kubenswrapper[4869]: I1001 17:12:16.090883 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5725efb6-5a19-4979-966f-2e6ee4e16109-test-operator-ephemeral-temporary" (OuterVolumeSpecName: "test-operator-ephemeral-temporary") pod "5725efb6-5a19-4979-966f-2e6ee4e16109" (UID: "5725efb6-5a19-4979-966f-2e6ee4e16109"). InnerVolumeSpecName "test-operator-ephemeral-temporary". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 17:12:16 crc kubenswrapper[4869]: I1001 17:12:16.094789 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5725efb6-5a19-4979-966f-2e6ee4e16109-ceph" (OuterVolumeSpecName: "ceph") pod "5725efb6-5a19-4979-966f-2e6ee4e16109" (UID: "5725efb6-5a19-4979-966f-2e6ee4e16109"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 17:12:16 crc kubenswrapper[4869]: I1001 17:12:16.095468 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5725efb6-5a19-4979-966f-2e6ee4e16109-kube-api-access-r4gwz" (OuterVolumeSpecName: "kube-api-access-r4gwz") pod "5725efb6-5a19-4979-966f-2e6ee4e16109" (UID: "5725efb6-5a19-4979-966f-2e6ee4e16109"). InnerVolumeSpecName "kube-api-access-r4gwz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 17:12:16 crc kubenswrapper[4869]: I1001 17:12:16.103248 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5725efb6-5a19-4979-966f-2e6ee4e16109-test-operator-ephemeral-workdir" (OuterVolumeSpecName: "test-operator-ephemeral-workdir") pod "5725efb6-5a19-4979-966f-2e6ee4e16109" (UID: "5725efb6-5a19-4979-966f-2e6ee4e16109"). InnerVolumeSpecName "test-operator-ephemeral-workdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 17:12:16 crc kubenswrapper[4869]: I1001 17:12:16.107705 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage12-crc" (OuterVolumeSpecName: "test-operator-logs") pod "5725efb6-5a19-4979-966f-2e6ee4e16109" (UID: "5725efb6-5a19-4979-966f-2e6ee4e16109"). InnerVolumeSpecName "local-storage12-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 01 17:12:16 crc kubenswrapper[4869]: I1001 17:12:16.117077 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5725efb6-5a19-4979-966f-2e6ee4e16109-workload-ssh-secret" (OuterVolumeSpecName: "workload-ssh-secret") pod "5725efb6-5a19-4979-966f-2e6ee4e16109" (UID: "5725efb6-5a19-4979-966f-2e6ee4e16109"). InnerVolumeSpecName "workload-ssh-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 17:12:16 crc kubenswrapper[4869]: I1001 17:12:16.119762 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5725efb6-5a19-4979-966f-2e6ee4e16109-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "5725efb6-5a19-4979-966f-2e6ee4e16109" (UID: "5725efb6-5a19-4979-966f-2e6ee4e16109"). InnerVolumeSpecName "openstack-config-secret". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 17:12:16 crc kubenswrapper[4869]: I1001 17:12:16.124079 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5725efb6-5a19-4979-966f-2e6ee4e16109-compute-ssh-secret" (OuterVolumeSpecName: "compute-ssh-secret") pod "5725efb6-5a19-4979-966f-2e6ee4e16109" (UID: "5725efb6-5a19-4979-966f-2e6ee4e16109"). InnerVolumeSpecName "compute-ssh-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 17:12:16 crc kubenswrapper[4869]: I1001 17:12:16.144602 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5725efb6-5a19-4979-966f-2e6ee4e16109-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "5725efb6-5a19-4979-966f-2e6ee4e16109" (UID: "5725efb6-5a19-4979-966f-2e6ee4e16109"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 17:12:16 crc kubenswrapper[4869]: I1001 17:12:16.148107 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5725efb6-5a19-4979-966f-2e6ee4e16109-ca-certs" (OuterVolumeSpecName: "ca-certs") pod "5725efb6-5a19-4979-966f-2e6ee4e16109" (UID: "5725efb6-5a19-4979-966f-2e6ee4e16109"). InnerVolumeSpecName "ca-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 17:12:16 crc kubenswrapper[4869]: I1001 17:12:16.192481 4869 reconciler_common.go:293] "Volume detached for volume \"compute-ssh-secret\" (UniqueName: \"kubernetes.io/secret/5725efb6-5a19-4979-966f-2e6ee4e16109-compute-ssh-secret\") on node \"crc\" DevicePath \"\"" Oct 01 17:12:16 crc kubenswrapper[4869]: I1001 17:12:16.192520 4869 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/5725efb6-5a19-4979-966f-2e6ee4e16109-test-operator-ephemeral-temporary\") on node \"crc\" DevicePath \"\"" Oct 01 17:12:16 crc kubenswrapper[4869]: I1001 17:12:16.192536 4869 reconciler_common.go:293] "Volume detached for volume \"workload-ssh-secret\" (UniqueName: \"kubernetes.io/secret/5725efb6-5a19-4979-966f-2e6ee4e16109-workload-ssh-secret\") on node \"crc\" DevicePath \"\"" Oct 01 17:12:16 crc kubenswrapper[4869]: I1001 17:12:16.192578 4869 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" " Oct 01 17:12:16 crc kubenswrapper[4869]: I1001 17:12:16.192592 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r4gwz\" (UniqueName: \"kubernetes.io/projected/5725efb6-5a19-4979-966f-2e6ee4e16109-kube-api-access-r4gwz\") on node \"crc\" DevicePath \"\"" Oct 01 17:12:16 crc kubenswrapper[4869]: I1001 17:12:16.192605 4869 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/5725efb6-5a19-4979-966f-2e6ee4e16109-test-operator-ephemeral-workdir\") on node \"crc\" DevicePath \"\"" Oct 01 17:12:16 crc kubenswrapper[4869]: I1001 17:12:16.192617 4869 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/5725efb6-5a19-4979-966f-2e6ee4e16109-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Oct 01 17:12:16 crc kubenswrapper[4869]: I1001 17:12:16.192630 4869 reconciler_common.go:293] "Volume detached for volume \"ca-certs\" (UniqueName: 
\"kubernetes.io/secret/5725efb6-5a19-4979-966f-2e6ee4e16109-ca-certs\") on node \"crc\" DevicePath \"\"" Oct 01 17:12:16 crc kubenswrapper[4869]: I1001 17:12:16.192641 4869 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/5725efb6-5a19-4979-966f-2e6ee4e16109-openstack-config\") on node \"crc\" DevicePath \"\"" Oct 01 17:12:16 crc kubenswrapper[4869]: I1001 17:12:16.192652 4869 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/5725efb6-5a19-4979-966f-2e6ee4e16109-ceph\") on node \"crc\" DevicePath \"\"" Oct 01 17:12:16 crc kubenswrapper[4869]: I1001 17:12:16.212875 4869 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage12-crc" (UniqueName: "kubernetes.io/local-volume/local-storage12-crc") on node "crc" Oct 01 17:12:16 crc kubenswrapper[4869]: I1001 17:12:16.294686 4869 reconciler_common.go:293] "Volume detached for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" DevicePath \"\"" Oct 01 17:12:16 crc kubenswrapper[4869]: I1001 17:12:16.623882 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ansibletest-ansibletest" event={"ID":"5725efb6-5a19-4979-966f-2e6ee4e16109","Type":"ContainerDied","Data":"8dd25d7c74eec96f97abc36b1fcf579b2fabf3998c5ce932ab085307ba3a58d3"} Oct 01 17:12:16 crc kubenswrapper[4869]: I1001 17:12:16.624192 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8dd25d7c74eec96f97abc36b1fcf579b2fabf3998c5ce932ab085307ba3a58d3" Oct 01 17:12:16 crc kubenswrapper[4869]: I1001 17:12:16.624005 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ansibletest-ansibletest" Oct 01 17:12:20 crc kubenswrapper[4869]: I1001 17:12:20.404345 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/test-operator-logs-pod-ansibletest-ansibletest-ansibletest"] Oct 01 17:12:20 crc kubenswrapper[4869]: E1001 17:12:20.405577 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5725efb6-5a19-4979-966f-2e6ee4e16109" containerName="ansibletest-ansibletest" Oct 01 17:12:20 crc kubenswrapper[4869]: I1001 17:12:20.405600 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="5725efb6-5a19-4979-966f-2e6ee4e16109" containerName="ansibletest-ansibletest" Oct 01 17:12:20 crc kubenswrapper[4869]: I1001 17:12:20.405978 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="5725efb6-5a19-4979-966f-2e6ee4e16109" containerName="ansibletest-ansibletest" Oct 01 17:12:20 crc kubenswrapper[4869]: I1001 17:12:20.407090 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/test-operator-logs-pod-ansibletest-ansibletest-ansibletest" Oct 01 17:12:20 crc kubenswrapper[4869]: I1001 17:12:20.427431 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-ansibletest-ansibletest-ansibletest"] Oct 01 17:12:20 crc kubenswrapper[4869]: I1001 17:12:20.482970 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b8ql2\" (UniqueName: \"kubernetes.io/projected/e17c4e7e-6bf5-4519-8b1a-c5f39ebc5ccb-kube-api-access-b8ql2\") pod \"test-operator-logs-pod-ansibletest-ansibletest-ansibletest\" (UID: \"e17c4e7e-6bf5-4519-8b1a-c5f39ebc5ccb\") " pod="openstack/test-operator-logs-pod-ansibletest-ansibletest-ansibletest" Oct 01 17:12:20 crc kubenswrapper[4869]: I1001 17:12:20.483036 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"test-operator-logs-pod-ansibletest-ansibletest-ansibletest\" (UID: \"e17c4e7e-6bf5-4519-8b1a-c5f39ebc5ccb\") " pod="openstack/test-operator-logs-pod-ansibletest-ansibletest-ansibletest" Oct 01 17:12:20 crc kubenswrapper[4869]: I1001 17:12:20.585152 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b8ql2\" (UniqueName: \"kubernetes.io/projected/e17c4e7e-6bf5-4519-8b1a-c5f39ebc5ccb-kube-api-access-b8ql2\") pod \"test-operator-logs-pod-ansibletest-ansibletest-ansibletest\" (UID: \"e17c4e7e-6bf5-4519-8b1a-c5f39ebc5ccb\") " pod="openstack/test-operator-logs-pod-ansibletest-ansibletest-ansibletest" Oct 01 17:12:20 crc kubenswrapper[4869]: I1001 17:12:20.585292 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"test-operator-logs-pod-ansibletest-ansibletest-ansibletest\" (UID: \"e17c4e7e-6bf5-4519-8b1a-c5f39ebc5ccb\") " pod="openstack/test-operator-logs-pod-ansibletest-ansibletest-ansibletest" Oct 01 17:12:20 crc kubenswrapper[4869]: I1001 17:12:20.585896 4869 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"test-operator-logs-pod-ansibletest-ansibletest-ansibletest\" (UID: \"e17c4e7e-6bf5-4519-8b1a-c5f39ebc5ccb\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/test-operator-logs-pod-ansibletest-ansibletest-ansibletest" Oct 01 17:12:20 crc kubenswrapper[4869]: I1001 17:12:20.619838 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b8ql2\" (UniqueName: \"kubernetes.io/projected/e17c4e7e-6bf5-4519-8b1a-c5f39ebc5ccb-kube-api-access-b8ql2\") pod \"test-operator-logs-pod-ansibletest-ansibletest-ansibletest\" (UID: \"e17c4e7e-6bf5-4519-8b1a-c5f39ebc5ccb\") " pod="openstack/test-operator-logs-pod-ansibletest-ansibletest-ansibletest" Oct 01 17:12:20 crc kubenswrapper[4869]: I1001 17:12:20.634699 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"test-operator-logs-pod-ansibletest-ansibletest-ansibletest\" (UID: \"e17c4e7e-6bf5-4519-8b1a-c5f39ebc5ccb\") " pod="openstack/test-operator-logs-pod-ansibletest-ansibletest-ansibletest" Oct 01 17:12:20 crc kubenswrapper[4869]: I1001 17:12:20.731165 4869 util.go:30] "No sandbox for pod can be 
found. Need to start a new one" pod="openstack/test-operator-logs-pod-ansibletest-ansibletest-ansibletest" Oct 01 17:12:21 crc kubenswrapper[4869]: I1001 17:12:21.044601 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-ansibletest-ansibletest-ansibletest"] Oct 01 17:12:21 crc kubenswrapper[4869]: I1001 17:12:21.675911 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-ansibletest-ansibletest-ansibletest" event={"ID":"e17c4e7e-6bf5-4519-8b1a-c5f39ebc5ccb","Type":"ContainerStarted","Data":"b41c5b611760cc946452b28677ea2baefdf4ce341a92e8211fccfb06d32261a6"} Oct 01 17:12:22 crc kubenswrapper[4869]: I1001 17:12:22.690427 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-ansibletest-ansibletest-ansibletest" event={"ID":"e17c4e7e-6bf5-4519-8b1a-c5f39ebc5ccb","Type":"ContainerStarted","Data":"01d7b7fa3ad1881b616a4b86ec670862cbe76a980b7bf37949a292fa925fb16a"} Oct 01 17:12:22 crc kubenswrapper[4869]: I1001 17:12:22.704967 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/test-operator-logs-pod-ansibletest-ansibletest-ansibletest" podStartSLOduration=2.002075244 podStartE2EDuration="2.704946593s" podCreationTimestamp="2025-10-01 17:12:20 +0000 UTC" firstStartedPulling="2025-10-01 17:12:21.060517989 +0000 UTC m=+7650.207361095" lastFinishedPulling="2025-10-01 17:12:21.763389328 +0000 UTC m=+7650.910232444" observedRunningTime="2025-10-01 17:12:22.701567148 +0000 UTC m=+7651.848410294" watchObservedRunningTime="2025-10-01 17:12:22.704946593 +0000 UTC m=+7651.851789709" Oct 01 17:12:36 crc kubenswrapper[4869]: I1001 17:12:36.040020 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-pq7nn"] Oct 01 17:12:36 crc kubenswrapper[4869]: I1001 17:12:36.043504 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pq7nn" Oct 01 17:12:36 crc kubenswrapper[4869]: I1001 17:12:36.068542 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-pq7nn"] Oct 01 17:12:36 crc kubenswrapper[4869]: I1001 17:12:36.115422 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8f2f6cf7-4b6c-4469-a3c5-32fad5e2bf93-utilities\") pod \"redhat-marketplace-pq7nn\" (UID: \"8f2f6cf7-4b6c-4469-a3c5-32fad5e2bf93\") " pod="openshift-marketplace/redhat-marketplace-pq7nn" Oct 01 17:12:36 crc kubenswrapper[4869]: I1001 17:12:36.115475 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8f2f6cf7-4b6c-4469-a3c5-32fad5e2bf93-catalog-content\") pod \"redhat-marketplace-pq7nn\" (UID: \"8f2f6cf7-4b6c-4469-a3c5-32fad5e2bf93\") " pod="openshift-marketplace/redhat-marketplace-pq7nn" Oct 01 17:12:36 crc kubenswrapper[4869]: I1001 17:12:36.115906 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-thmsm\" (UniqueName: \"kubernetes.io/projected/8f2f6cf7-4b6c-4469-a3c5-32fad5e2bf93-kube-api-access-thmsm\") pod \"redhat-marketplace-pq7nn\" (UID: \"8f2f6cf7-4b6c-4469-a3c5-32fad5e2bf93\") " pod="openshift-marketplace/redhat-marketplace-pq7nn" Oct 01 17:12:36 crc kubenswrapper[4869]: I1001 17:12:36.218938 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-thmsm\" (UniqueName: \"kubernetes.io/projected/8f2f6cf7-4b6c-4469-a3c5-32fad5e2bf93-kube-api-access-thmsm\") pod \"redhat-marketplace-pq7nn\" (UID: \"8f2f6cf7-4b6c-4469-a3c5-32fad5e2bf93\") " pod="openshift-marketplace/redhat-marketplace-pq7nn" Oct 01 17:12:36 crc kubenswrapper[4869]: I1001 17:12:36.219213 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8f2f6cf7-4b6c-4469-a3c5-32fad5e2bf93-utilities\") pod \"redhat-marketplace-pq7nn\" (UID: \"8f2f6cf7-4b6c-4469-a3c5-32fad5e2bf93\") " pod="openshift-marketplace/redhat-marketplace-pq7nn" Oct 01 17:12:36 crc kubenswrapper[4869]: I1001 17:12:36.219314 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8f2f6cf7-4b6c-4469-a3c5-32fad5e2bf93-catalog-content\") pod \"redhat-marketplace-pq7nn\" (UID: \"8f2f6cf7-4b6c-4469-a3c5-32fad5e2bf93\") " pod="openshift-marketplace/redhat-marketplace-pq7nn" Oct 01 17:12:36 crc kubenswrapper[4869]: I1001 17:12:36.219890 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8f2f6cf7-4b6c-4469-a3c5-32fad5e2bf93-utilities\") pod \"redhat-marketplace-pq7nn\" (UID: \"8f2f6cf7-4b6c-4469-a3c5-32fad5e2bf93\") " pod="openshift-marketplace/redhat-marketplace-pq7nn" Oct 01 17:12:36 crc kubenswrapper[4869]: I1001 17:12:36.219925 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8f2f6cf7-4b6c-4469-a3c5-32fad5e2bf93-catalog-content\") pod \"redhat-marketplace-pq7nn\" (UID: \"8f2f6cf7-4b6c-4469-a3c5-32fad5e2bf93\") " pod="openshift-marketplace/redhat-marketplace-pq7nn" Oct 01 17:12:36 crc kubenswrapper[4869]: I1001 17:12:36.238499 4869 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-thmsm\" (UniqueName: \"kubernetes.io/projected/8f2f6cf7-4b6c-4469-a3c5-32fad5e2bf93-kube-api-access-thmsm\") pod \"redhat-marketplace-pq7nn\" (UID: \"8f2f6cf7-4b6c-4469-a3c5-32fad5e2bf93\") " pod="openshift-marketplace/redhat-marketplace-pq7nn" Oct 01 17:12:36 crc kubenswrapper[4869]: I1001 17:12:36.382096 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pq7nn" Oct 01 17:12:36 crc kubenswrapper[4869]: I1001 17:12:36.846217 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-pq7nn"] Oct 01 17:12:36 crc kubenswrapper[4869]: W1001 17:12:36.846516 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8f2f6cf7_4b6c_4469_a3c5_32fad5e2bf93.slice/crio-0414dc1588aa58f13bf59f2b38a2c846c759d9a8be0e3bb1335dceccd5c17c90 WatchSource:0}: Error finding container 0414dc1588aa58f13bf59f2b38a2c846c759d9a8be0e3bb1335dceccd5c17c90: Status 404 returned error can't find the container with id 0414dc1588aa58f13bf59f2b38a2c846c759d9a8be0e3bb1335dceccd5c17c90 Oct 01 17:12:36 crc kubenswrapper[4869]: I1001 17:12:36.874978 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pq7nn" event={"ID":"8f2f6cf7-4b6c-4469-a3c5-32fad5e2bf93","Type":"ContainerStarted","Data":"0414dc1588aa58f13bf59f2b38a2c846c759d9a8be0e3bb1335dceccd5c17c90"} Oct 01 17:12:37 crc kubenswrapper[4869]: I1001 17:12:37.888208 4869 generic.go:334] "Generic (PLEG): container finished" podID="8f2f6cf7-4b6c-4469-a3c5-32fad5e2bf93" containerID="e2c8b92051550ea6a483b27fe19e744b33f615b467cfd36a28dd43fc0c5e2f36" exitCode=0 Oct 01 17:12:37 crc kubenswrapper[4869]: I1001 17:12:37.888318 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pq7nn" event={"ID":"8f2f6cf7-4b6c-4469-a3c5-32fad5e2bf93","Type":"ContainerDied","Data":"e2c8b92051550ea6a483b27fe19e744b33f615b467cfd36a28dd43fc0c5e2f36"} Oct 01 17:12:39 crc kubenswrapper[4869]: I1001 17:12:39.918376 4869 generic.go:334] "Generic (PLEG): container finished" podID="8f2f6cf7-4b6c-4469-a3c5-32fad5e2bf93" containerID="1d185390dcc50669305c355c042252f36ccc19a1ab886c6714328776e07eb3cc" exitCode=0 Oct 01 17:12:39 crc kubenswrapper[4869]: I1001 17:12:39.918441 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pq7nn" event={"ID":"8f2f6cf7-4b6c-4469-a3c5-32fad5e2bf93","Type":"ContainerDied","Data":"1d185390dcc50669305c355c042252f36ccc19a1ab886c6714328776e07eb3cc"} Oct 01 17:12:40 crc kubenswrapper[4869]: I1001 17:12:40.581900 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizontest-tests-horizontest"] Oct 01 17:12:40 crc kubenswrapper[4869]: I1001 17:12:40.584016 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizontest-tests-horizontest" Oct 01 17:12:40 crc kubenswrapper[4869]: I1001 17:12:40.592852 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizontest-tests-horizontesthorizontest-config" Oct 01 17:12:40 crc kubenswrapper[4869]: I1001 17:12:40.592913 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"test-operator-clouds-config" Oct 01 17:12:40 crc kubenswrapper[4869]: I1001 17:12:40.615204 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-clouds-config\" (UniqueName: \"kubernetes.io/configmap/e28d6c91-53b4-4c43-885f-4eb32039fb5c-test-operator-clouds-config\") pod \"horizontest-tests-horizontest\" (UID: \"e28d6c91-53b4-4c43-885f-4eb32039fb5c\") " pod="openstack/horizontest-tests-horizontest" Oct 01 17:12:40 crc kubenswrapper[4869]: I1001 17:12:40.615400 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/e28d6c91-53b4-4c43-885f-4eb32039fb5c-openstack-config-secret\") pod \"horizontest-tests-horizontest\" (UID: \"e28d6c91-53b4-4c43-885f-4eb32039fb5c\") " pod="openstack/horizontest-tests-horizontest" Oct 01 17:12:40 crc kubenswrapper[4869]: I1001 17:12:40.632443 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizontest-tests-horizontest"] Oct 01 17:12:40 crc kubenswrapper[4869]: I1001 17:12:40.717724 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"horizontest-tests-horizontest\" (UID: \"e28d6c91-53b4-4c43-885f-4eb32039fb5c\") " pod="openstack/horizontest-tests-horizontest" Oct 01 17:12:40 crc kubenswrapper[4869]: I1001 17:12:40.717846 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-clouds-config\" (UniqueName: \"kubernetes.io/configmap/e28d6c91-53b4-4c43-885f-4eb32039fb5c-test-operator-clouds-config\") pod \"horizontest-tests-horizontest\" (UID: \"e28d6c91-53b4-4c43-885f-4eb32039fb5c\") " pod="openstack/horizontest-tests-horizontest" Oct 01 17:12:40 crc kubenswrapper[4869]: I1001 17:12:40.717966 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/e28d6c91-53b4-4c43-885f-4eb32039fb5c-ca-certs\") pod \"horizontest-tests-horizontest\" (UID: \"e28d6c91-53b4-4c43-885f-4eb32039fb5c\") " pod="openstack/horizontest-tests-horizontest" Oct 01 17:12:40 crc kubenswrapper[4869]: I1001 17:12:40.718032 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/e28d6c91-53b4-4c43-885f-4eb32039fb5c-test-operator-ephemeral-temporary\") pod \"horizontest-tests-horizontest\" (UID: \"e28d6c91-53b4-4c43-885f-4eb32039fb5c\") " pod="openstack/horizontest-tests-horizontest" Oct 01 17:12:40 crc kubenswrapper[4869]: I1001 17:12:40.718836 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/e28d6c91-53b4-4c43-885f-4eb32039fb5c-test-operator-ephemeral-workdir\") pod \"horizontest-tests-horizontest\" (UID: \"e28d6c91-53b4-4c43-885f-4eb32039fb5c\") " 
pod="openstack/horizontest-tests-horizontest" Oct 01 17:12:40 crc kubenswrapper[4869]: I1001 17:12:40.718878 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/e28d6c91-53b4-4c43-885f-4eb32039fb5c-ceph\") pod \"horizontest-tests-horizontest\" (UID: \"e28d6c91-53b4-4c43-885f-4eb32039fb5c\") " pod="openstack/horizontest-tests-horizontest" Oct 01 17:12:40 crc kubenswrapper[4869]: I1001 17:12:40.718909 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dlfnz\" (UniqueName: \"kubernetes.io/projected/e28d6c91-53b4-4c43-885f-4eb32039fb5c-kube-api-access-dlfnz\") pod \"horizontest-tests-horizontest\" (UID: \"e28d6c91-53b4-4c43-885f-4eb32039fb5c\") " pod="openstack/horizontest-tests-horizontest" Oct 01 17:12:40 crc kubenswrapper[4869]: I1001 17:12:40.718929 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/e28d6c91-53b4-4c43-885f-4eb32039fb5c-openstack-config-secret\") pod \"horizontest-tests-horizontest\" (UID: \"e28d6c91-53b4-4c43-885f-4eb32039fb5c\") " pod="openstack/horizontest-tests-horizontest" Oct 01 17:12:40 crc kubenswrapper[4869]: I1001 17:12:40.719711 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-clouds-config\" (UniqueName: \"kubernetes.io/configmap/e28d6c91-53b4-4c43-885f-4eb32039fb5c-test-operator-clouds-config\") pod \"horizontest-tests-horizontest\" (UID: \"e28d6c91-53b4-4c43-885f-4eb32039fb5c\") " pod="openstack/horizontest-tests-horizontest" Oct 01 17:12:40 crc kubenswrapper[4869]: I1001 17:12:40.728210 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/e28d6c91-53b4-4c43-885f-4eb32039fb5c-openstack-config-secret\") pod \"horizontest-tests-horizontest\" (UID: \"e28d6c91-53b4-4c43-885f-4eb32039fb5c\") " pod="openstack/horizontest-tests-horizontest" Oct 01 17:12:40 crc kubenswrapper[4869]: I1001 17:12:40.820695 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/e28d6c91-53b4-4c43-885f-4eb32039fb5c-ca-certs\") pod \"horizontest-tests-horizontest\" (UID: \"e28d6c91-53b4-4c43-885f-4eb32039fb5c\") " pod="openstack/horizontest-tests-horizontest" Oct 01 17:12:40 crc kubenswrapper[4869]: I1001 17:12:40.821183 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/e28d6c91-53b4-4c43-885f-4eb32039fb5c-test-operator-ephemeral-temporary\") pod \"horizontest-tests-horizontest\" (UID: \"e28d6c91-53b4-4c43-885f-4eb32039fb5c\") " pod="openstack/horizontest-tests-horizontest" Oct 01 17:12:40 crc kubenswrapper[4869]: I1001 17:12:40.821222 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/e28d6c91-53b4-4c43-885f-4eb32039fb5c-test-operator-ephemeral-workdir\") pod \"horizontest-tests-horizontest\" (UID: \"e28d6c91-53b4-4c43-885f-4eb32039fb5c\") " pod="openstack/horizontest-tests-horizontest" Oct 01 17:12:40 crc kubenswrapper[4869]: I1001 17:12:40.821249 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/e28d6c91-53b4-4c43-885f-4eb32039fb5c-ceph\") pod 
\"horizontest-tests-horizontest\" (UID: \"e28d6c91-53b4-4c43-885f-4eb32039fb5c\") " pod="openstack/horizontest-tests-horizontest" Oct 01 17:12:40 crc kubenswrapper[4869]: I1001 17:12:40.821344 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dlfnz\" (UniqueName: \"kubernetes.io/projected/e28d6c91-53b4-4c43-885f-4eb32039fb5c-kube-api-access-dlfnz\") pod \"horizontest-tests-horizontest\" (UID: \"e28d6c91-53b4-4c43-885f-4eb32039fb5c\") " pod="openstack/horizontest-tests-horizontest" Oct 01 17:12:40 crc kubenswrapper[4869]: I1001 17:12:40.821453 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"horizontest-tests-horizontest\" (UID: \"e28d6c91-53b4-4c43-885f-4eb32039fb5c\") " pod="openstack/horizontest-tests-horizontest" Oct 01 17:12:40 crc kubenswrapper[4869]: I1001 17:12:40.821883 4869 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"horizontest-tests-horizontest\" (UID: \"e28d6c91-53b4-4c43-885f-4eb32039fb5c\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/horizontest-tests-horizontest" Oct 01 17:12:40 crc kubenswrapper[4869]: I1001 17:12:40.822039 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/e28d6c91-53b4-4c43-885f-4eb32039fb5c-test-operator-ephemeral-workdir\") pod \"horizontest-tests-horizontest\" (UID: \"e28d6c91-53b4-4c43-885f-4eb32039fb5c\") " pod="openstack/horizontest-tests-horizontest" Oct 01 17:12:40 crc kubenswrapper[4869]: I1001 17:12:40.822092 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/e28d6c91-53b4-4c43-885f-4eb32039fb5c-test-operator-ephemeral-temporary\") pod \"horizontest-tests-horizontest\" (UID: \"e28d6c91-53b4-4c43-885f-4eb32039fb5c\") " pod="openstack/horizontest-tests-horizontest" Oct 01 17:12:40 crc kubenswrapper[4869]: I1001 17:12:40.825088 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/e28d6c91-53b4-4c43-885f-4eb32039fb5c-ceph\") pod \"horizontest-tests-horizontest\" (UID: \"e28d6c91-53b4-4c43-885f-4eb32039fb5c\") " pod="openstack/horizontest-tests-horizontest" Oct 01 17:12:40 crc kubenswrapper[4869]: I1001 17:12:40.830686 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/e28d6c91-53b4-4c43-885f-4eb32039fb5c-ca-certs\") pod \"horizontest-tests-horizontest\" (UID: \"e28d6c91-53b4-4c43-885f-4eb32039fb5c\") " pod="openstack/horizontest-tests-horizontest" Oct 01 17:12:40 crc kubenswrapper[4869]: I1001 17:12:40.837628 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dlfnz\" (UniqueName: \"kubernetes.io/projected/e28d6c91-53b4-4c43-885f-4eb32039fb5c-kube-api-access-dlfnz\") pod \"horizontest-tests-horizontest\" (UID: \"e28d6c91-53b4-4c43-885f-4eb32039fb5c\") " pod="openstack/horizontest-tests-horizontest" Oct 01 17:12:40 crc kubenswrapper[4869]: I1001 17:12:40.857534 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"horizontest-tests-horizontest\" (UID: 
\"e28d6c91-53b4-4c43-885f-4eb32039fb5c\") " pod="openstack/horizontest-tests-horizontest" Oct 01 17:12:40 crc kubenswrapper[4869]: I1001 17:12:40.911272 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizontest-tests-horizontest" Oct 01 17:12:40 crc kubenswrapper[4869]: I1001 17:12:40.930756 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pq7nn" event={"ID":"8f2f6cf7-4b6c-4469-a3c5-32fad5e2bf93","Type":"ContainerStarted","Data":"0b5c4d22b72ab7a98e560d990d92c340e951d7f48b747472663232bfc8817bf9"} Oct 01 17:12:40 crc kubenswrapper[4869]: I1001 17:12:40.950049 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-pq7nn" podStartSLOduration=3.492277233 podStartE2EDuration="5.950029976s" podCreationTimestamp="2025-10-01 17:12:35 +0000 UTC" firstStartedPulling="2025-10-01 17:12:37.892695493 +0000 UTC m=+7667.039538619" lastFinishedPulling="2025-10-01 17:12:40.350448246 +0000 UTC m=+7669.497291362" observedRunningTime="2025-10-01 17:12:40.949902923 +0000 UTC m=+7670.096746059" watchObservedRunningTime="2025-10-01 17:12:40.950029976 +0000 UTC m=+7670.096873112" Oct 01 17:12:41 crc kubenswrapper[4869]: I1001 17:12:41.410233 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizontest-tests-horizontest"] Oct 01 17:12:41 crc kubenswrapper[4869]: W1001 17:12:41.415471 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode28d6c91_53b4_4c43_885f_4eb32039fb5c.slice/crio-6a39b512400e748f9834e86ce7a2cb9968215b92c99db26212134ae7e20227da WatchSource:0}: Error finding container 6a39b512400e748f9834e86ce7a2cb9968215b92c99db26212134ae7e20227da: Status 404 returned error can't find the container with id 6a39b512400e748f9834e86ce7a2cb9968215b92c99db26212134ae7e20227da Oct 01 17:12:41 crc kubenswrapper[4869]: I1001 17:12:41.941835 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizontest-tests-horizontest" event={"ID":"e28d6c91-53b4-4c43-885f-4eb32039fb5c","Type":"ContainerStarted","Data":"6a39b512400e748f9834e86ce7a2cb9968215b92c99db26212134ae7e20227da"} Oct 01 17:12:43 crc kubenswrapper[4869]: I1001 17:12:43.353710 4869 patch_prober.go:28] interesting pod/machine-config-daemon-c86m8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 17:12:43 crc kubenswrapper[4869]: I1001 17:12:43.354074 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 17:12:43 crc kubenswrapper[4869]: I1001 17:12:43.354120 4869 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" Oct 01 17:12:43 crc kubenswrapper[4869]: I1001 17:12:43.354937 4869 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"1c17c6f6d4b5b7ea03383059037e4e76a83710badfded0c48fa96960660db929"} pod="openshift-machine-config-operator/machine-config-daemon-c86m8" 
containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 01 17:12:43 crc kubenswrapper[4869]: I1001 17:12:43.354998 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" containerID="cri-o://1c17c6f6d4b5b7ea03383059037e4e76a83710badfded0c48fa96960660db929" gracePeriod=600 Oct 01 17:12:43 crc kubenswrapper[4869]: E1001 17:12:43.538949 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 17:12:43 crc kubenswrapper[4869]: I1001 17:12:43.969275 4869 generic.go:334] "Generic (PLEG): container finished" podID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerID="1c17c6f6d4b5b7ea03383059037e4e76a83710badfded0c48fa96960660db929" exitCode=0 Oct 01 17:12:43 crc kubenswrapper[4869]: I1001 17:12:43.969323 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" event={"ID":"a4b64f7f-0b03-4f47-965b-9fde048b735c","Type":"ContainerDied","Data":"1c17c6f6d4b5b7ea03383059037e4e76a83710badfded0c48fa96960660db929"} Oct 01 17:12:43 crc kubenswrapper[4869]: I1001 17:12:43.969376 4869 scope.go:117] "RemoveContainer" containerID="63f8525468ce6f604876dfd165c9ad9323212e1d06ef08033a784d7275c08f61" Oct 01 17:12:43 crc kubenswrapper[4869]: I1001 17:12:43.971704 4869 scope.go:117] "RemoveContainer" containerID="1c17c6f6d4b5b7ea03383059037e4e76a83710badfded0c48fa96960660db929" Oct 01 17:12:43 crc kubenswrapper[4869]: E1001 17:12:43.972897 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 17:12:46 crc kubenswrapper[4869]: I1001 17:12:46.382814 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-pq7nn" Oct 01 17:12:46 crc kubenswrapper[4869]: I1001 17:12:46.383206 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-pq7nn" Oct 01 17:12:46 crc kubenswrapper[4869]: I1001 17:12:46.467624 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-pq7nn" Oct 01 17:12:47 crc kubenswrapper[4869]: I1001 17:12:47.044868 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-pq7nn" Oct 01 17:12:47 crc kubenswrapper[4869]: I1001 17:12:47.093141 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-pq7nn"] Oct 01 17:12:49 crc kubenswrapper[4869]: I1001 17:12:49.014404 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-pq7nn" 
podUID="8f2f6cf7-4b6c-4469-a3c5-32fad5e2bf93" containerName="registry-server" containerID="cri-o://0b5c4d22b72ab7a98e560d990d92c340e951d7f48b747472663232bfc8817bf9" gracePeriod=2 Oct 01 17:12:50 crc kubenswrapper[4869]: I1001 17:12:50.026688 4869 generic.go:334] "Generic (PLEG): container finished" podID="8f2f6cf7-4b6c-4469-a3c5-32fad5e2bf93" containerID="0b5c4d22b72ab7a98e560d990d92c340e951d7f48b747472663232bfc8817bf9" exitCode=0 Oct 01 17:12:50 crc kubenswrapper[4869]: I1001 17:12:50.026782 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pq7nn" event={"ID":"8f2f6cf7-4b6c-4469-a3c5-32fad5e2bf93","Type":"ContainerDied","Data":"0b5c4d22b72ab7a98e560d990d92c340e951d7f48b747472663232bfc8817bf9"} Oct 01 17:12:53 crc kubenswrapper[4869]: I1001 17:12:53.595958 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-qxdbx"] Oct 01 17:12:53 crc kubenswrapper[4869]: I1001 17:12:53.598519 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-qxdbx"] Oct 01 17:12:53 crc kubenswrapper[4869]: I1001 17:12:53.598713 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-qxdbx" Oct 01 17:12:53 crc kubenswrapper[4869]: I1001 17:12:53.705014 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sddst\" (UniqueName: \"kubernetes.io/projected/cf744061-ef80-421f-bb57-fa268c5f60fb-kube-api-access-sddst\") pod \"certified-operators-qxdbx\" (UID: \"cf744061-ef80-421f-bb57-fa268c5f60fb\") " pod="openshift-marketplace/certified-operators-qxdbx" Oct 01 17:12:53 crc kubenswrapper[4869]: I1001 17:12:53.708629 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cf744061-ef80-421f-bb57-fa268c5f60fb-utilities\") pod \"certified-operators-qxdbx\" (UID: \"cf744061-ef80-421f-bb57-fa268c5f60fb\") " pod="openshift-marketplace/certified-operators-qxdbx" Oct 01 17:12:53 crc kubenswrapper[4869]: I1001 17:12:53.709434 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cf744061-ef80-421f-bb57-fa268c5f60fb-catalog-content\") pod \"certified-operators-qxdbx\" (UID: \"cf744061-ef80-421f-bb57-fa268c5f60fb\") " pod="openshift-marketplace/certified-operators-qxdbx" Oct 01 17:12:53 crc kubenswrapper[4869]: I1001 17:12:53.810106 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cf744061-ef80-421f-bb57-fa268c5f60fb-catalog-content\") pod \"certified-operators-qxdbx\" (UID: \"cf744061-ef80-421f-bb57-fa268c5f60fb\") " pod="openshift-marketplace/certified-operators-qxdbx" Oct 01 17:12:53 crc kubenswrapper[4869]: I1001 17:12:53.810182 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sddst\" (UniqueName: \"kubernetes.io/projected/cf744061-ef80-421f-bb57-fa268c5f60fb-kube-api-access-sddst\") pod \"certified-operators-qxdbx\" (UID: \"cf744061-ef80-421f-bb57-fa268c5f60fb\") " pod="openshift-marketplace/certified-operators-qxdbx" Oct 01 17:12:53 crc kubenswrapper[4869]: I1001 17:12:53.810209 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/cf744061-ef80-421f-bb57-fa268c5f60fb-utilities\") pod \"certified-operators-qxdbx\" (UID: \"cf744061-ef80-421f-bb57-fa268c5f60fb\") " pod="openshift-marketplace/certified-operators-qxdbx" Oct 01 17:12:53 crc kubenswrapper[4869]: I1001 17:12:53.810723 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cf744061-ef80-421f-bb57-fa268c5f60fb-utilities\") pod \"certified-operators-qxdbx\" (UID: \"cf744061-ef80-421f-bb57-fa268c5f60fb\") " pod="openshift-marketplace/certified-operators-qxdbx" Oct 01 17:12:53 crc kubenswrapper[4869]: I1001 17:12:53.811000 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cf744061-ef80-421f-bb57-fa268c5f60fb-catalog-content\") pod \"certified-operators-qxdbx\" (UID: \"cf744061-ef80-421f-bb57-fa268c5f60fb\") " pod="openshift-marketplace/certified-operators-qxdbx" Oct 01 17:12:53 crc kubenswrapper[4869]: I1001 17:12:53.842563 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sddst\" (UniqueName: \"kubernetes.io/projected/cf744061-ef80-421f-bb57-fa268c5f60fb-kube-api-access-sddst\") pod \"certified-operators-qxdbx\" (UID: \"cf744061-ef80-421f-bb57-fa268c5f60fb\") " pod="openshift-marketplace/certified-operators-qxdbx" Oct 01 17:12:53 crc kubenswrapper[4869]: I1001 17:12:53.944897 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-qxdbx" Oct 01 17:12:56 crc kubenswrapper[4869]: E1001 17:12:56.383710 4869 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 0b5c4d22b72ab7a98e560d990d92c340e951d7f48b747472663232bfc8817bf9 is running failed: container process not found" containerID="0b5c4d22b72ab7a98e560d990d92c340e951d7f48b747472663232bfc8817bf9" cmd=["grpc_health_probe","-addr=:50051"] Oct 01 17:12:56 crc kubenswrapper[4869]: E1001 17:12:56.384620 4869 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 0b5c4d22b72ab7a98e560d990d92c340e951d7f48b747472663232bfc8817bf9 is running failed: container process not found" containerID="0b5c4d22b72ab7a98e560d990d92c340e951d7f48b747472663232bfc8817bf9" cmd=["grpc_health_probe","-addr=:50051"] Oct 01 17:12:56 crc kubenswrapper[4869]: E1001 17:12:56.385178 4869 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 0b5c4d22b72ab7a98e560d990d92c340e951d7f48b747472663232bfc8817bf9 is running failed: container process not found" containerID="0b5c4d22b72ab7a98e560d990d92c340e951d7f48b747472663232bfc8817bf9" cmd=["grpc_health_probe","-addr=:50051"] Oct 01 17:12:56 crc kubenswrapper[4869]: E1001 17:12:56.385225 4869 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 0b5c4d22b72ab7a98e560d990d92c340e951d7f48b747472663232bfc8817bf9 is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/redhat-marketplace-pq7nn" podUID="8f2f6cf7-4b6c-4469-a3c5-32fad5e2bf93" containerName="registry-server" Oct 01 17:12:57 crc kubenswrapper[4869]: E1001 17:12:57.526714 4869 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying 
config: context canceled" image="quay.io/podified-antelope-centos9/openstack-horizontest:current-podified" Oct 01 17:12:57 crc kubenswrapper[4869]: E1001 17:12:57.527122 4869 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:horizontest-tests-horizontest,Image:quay.io/podified-antelope-centos9/openstack-horizontest:current-podified,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:ADMIN_PASSWORD,Value:12345678,ValueFrom:nil,},EnvVar{Name:ADMIN_USERNAME,Value:admin,ValueFrom:nil,},EnvVar{Name:AUTH_URL,Value:https://keystone-public-openstack.apps-crc.testing,ValueFrom:nil,},EnvVar{Name:DASHBOARD_URL,Value:https://horizon-openstack.apps-crc.testing/,ValueFrom:nil,},EnvVar{Name:EXTRA_FLAG,Value:not pagination and test_users.py,ValueFrom:nil,},EnvVar{Name:FLAVOR_NAME,Value:m1.tiny,ValueFrom:nil,},EnvVar{Name:HORIZONTEST_DEBUG_MODE,Value:false,ValueFrom:nil,},EnvVar{Name:HORIZON_KEYS_FOLDER,Value:/etc/test_operator,ValueFrom:nil,},EnvVar{Name:HORIZON_LOGS_DIR_NAME,Value:horizon,ValueFrom:nil,},EnvVar{Name:HORIZON_REPO_BRANCH,Value:master,ValueFrom:nil,},EnvVar{Name:IMAGE_FILE,Value:/var/lib/horizontest/cirros-0.6.2-x86_64-disk.img,ValueFrom:nil,},EnvVar{Name:IMAGE_FILE_NAME,Value:cirros-0.6.2-x86_64-disk,ValueFrom:nil,},EnvVar{Name:IMAGE_URL,Value:http://download.cirros-cloud.net/0.6.2/cirros-0.6.2-x86_64-disk.img,ValueFrom:nil,},EnvVar{Name:PASSWORD,Value:horizontest,ValueFrom:nil,},EnvVar{Name:PROJECT_NAME,Value:horizontest,ValueFrom:nil,},EnvVar{Name:PROJECT_NAME_XPATH,Value://*[@class=\"context-project\"]//ancestor::ul,ValueFrom:nil,},EnvVar{Name:REPO_URL,Value:https://review.opendev.org/openstack/horizon,ValueFrom:nil,},EnvVar{Name:USER_NAME,Value:horizontest,ValueFrom:nil,},EnvVar{Name:USE_EXTERNAL_FILES,Value:True,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{2 0} {} 2 DecimalSI},memory: {{4294967296 0} {} 4Gi BinarySI},},Requests:ResourceList{cpu: {{1 0} {} 1 DecimalSI},memory: {{2147483648 0} {} 2Gi 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:test-operator-ephemeral-workdir,ReadOnly:false,MountPath:/var/lib/horizontest,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-ephemeral-temporary,ReadOnly:false,MountPath:/tmp,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-logs,ReadOnly:false,MountPath:/var/lib/horizontest/external_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-clouds-config,ReadOnly:true,MountPath:/var/lib/horizontest/.config/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-clouds-config,ReadOnly:true,MountPath:/etc/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config-secret,ReadOnly:false,MountPath:/etc/openstack/secure.yaml,SubPath:secure.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ca-certs,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ca-certs,ReadOnly:true,MountPath:/etc/pki/tls/certs/ca-bundle.trust.crt,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ceph,ReadOnly:true,MountPath:/etc/ceph,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-dlfnz,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[NET_ADMIN NET_RAW],Drop:[],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42455,RunAsNonRoot:*false,ReadOnlyRootFilesystem:*false,AllowPrivilegeEscalation:*true,RunAsGroup:*42455,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizontest-tests-horizontest_openstack(e28d6c91-53b4-4c43-885f-4eb32039fb5c): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 01 17:12:57 crc kubenswrapper[4869]: E1001 17:12:57.528635 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"horizontest-tests-horizontest\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/horizontest-tests-horizontest" podUID="e28d6c91-53b4-4c43-885f-4eb32039fb5c" Oct 01 17:12:57 crc kubenswrapper[4869]: I1001 17:12:57.898688 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pq7nn" Oct 01 17:12:57 crc kubenswrapper[4869]: I1001 17:12:57.994329 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-qxdbx"] Oct 01 17:12:57 crc kubenswrapper[4869]: I1001 17:12:57.994989 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-thmsm\" (UniqueName: \"kubernetes.io/projected/8f2f6cf7-4b6c-4469-a3c5-32fad5e2bf93-kube-api-access-thmsm\") pod \"8f2f6cf7-4b6c-4469-a3c5-32fad5e2bf93\" (UID: \"8f2f6cf7-4b6c-4469-a3c5-32fad5e2bf93\") " Oct 01 17:12:57 crc kubenswrapper[4869]: I1001 17:12:57.995210 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8f2f6cf7-4b6c-4469-a3c5-32fad5e2bf93-catalog-content\") pod \"8f2f6cf7-4b6c-4469-a3c5-32fad5e2bf93\" (UID: \"8f2f6cf7-4b6c-4469-a3c5-32fad5e2bf93\") " Oct 01 17:12:57 crc kubenswrapper[4869]: I1001 17:12:57.995321 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8f2f6cf7-4b6c-4469-a3c5-32fad5e2bf93-utilities\") pod \"8f2f6cf7-4b6c-4469-a3c5-32fad5e2bf93\" (UID: \"8f2f6cf7-4b6c-4469-a3c5-32fad5e2bf93\") " Oct 01 17:12:57 crc kubenswrapper[4869]: I1001 17:12:57.996781 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f2f6cf7-4b6c-4469-a3c5-32fad5e2bf93-utilities" (OuterVolumeSpecName: "utilities") pod "8f2f6cf7-4b6c-4469-a3c5-32fad5e2bf93" (UID: "8f2f6cf7-4b6c-4469-a3c5-32fad5e2bf93"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 17:12:58 crc kubenswrapper[4869]: I1001 17:12:58.002611 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f2f6cf7-4b6c-4469-a3c5-32fad5e2bf93-kube-api-access-thmsm" (OuterVolumeSpecName: "kube-api-access-thmsm") pod "8f2f6cf7-4b6c-4469-a3c5-32fad5e2bf93" (UID: "8f2f6cf7-4b6c-4469-a3c5-32fad5e2bf93"). InnerVolumeSpecName "kube-api-access-thmsm". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 17:12:58 crc kubenswrapper[4869]: I1001 17:12:58.008121 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f2f6cf7-4b6c-4469-a3c5-32fad5e2bf93-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8f2f6cf7-4b6c-4469-a3c5-32fad5e2bf93" (UID: "8f2f6cf7-4b6c-4469-a3c5-32fad5e2bf93"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 17:12:58 crc kubenswrapper[4869]: I1001 17:12:58.097414 4869 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8f2f6cf7-4b6c-4469-a3c5-32fad5e2bf93-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 17:12:58 crc kubenswrapper[4869]: I1001 17:12:58.097734 4869 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8f2f6cf7-4b6c-4469-a3c5-32fad5e2bf93-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 17:12:58 crc kubenswrapper[4869]: I1001 17:12:58.097744 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-thmsm\" (UniqueName: \"kubernetes.io/projected/8f2f6cf7-4b6c-4469-a3c5-32fad5e2bf93-kube-api-access-thmsm\") on node \"crc\" DevicePath \"\"" Oct 01 17:12:58 crc kubenswrapper[4869]: I1001 17:12:58.110818 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pq7nn" event={"ID":"8f2f6cf7-4b6c-4469-a3c5-32fad5e2bf93","Type":"ContainerDied","Data":"0414dc1588aa58f13bf59f2b38a2c846c759d9a8be0e3bb1335dceccd5c17c90"} Oct 01 17:12:58 crc kubenswrapper[4869]: I1001 17:12:58.110831 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pq7nn" Oct 01 17:12:58 crc kubenswrapper[4869]: I1001 17:12:58.111087 4869 scope.go:117] "RemoveContainer" containerID="0b5c4d22b72ab7a98e560d990d92c340e951d7f48b747472663232bfc8817bf9" Oct 01 17:12:58 crc kubenswrapper[4869]: I1001 17:12:58.114117 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qxdbx" event={"ID":"cf744061-ef80-421f-bb57-fa268c5f60fb","Type":"ContainerStarted","Data":"d9c2d299fdc5dab64bf78394ae547dfa32a9b0d433fe94f3caeb6106bc309ba2"} Oct 01 17:12:58 crc kubenswrapper[4869]: E1001 17:12:58.114589 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"horizontest-tests-horizontest\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizontest:current-podified\\\"\"" pod="openstack/horizontest-tests-horizontest" podUID="e28d6c91-53b4-4c43-885f-4eb32039fb5c" Oct 01 17:12:58 crc kubenswrapper[4869]: I1001 17:12:58.139151 4869 scope.go:117] "RemoveContainer" containerID="1d185390dcc50669305c355c042252f36ccc19a1ab886c6714328776e07eb3cc" Oct 01 17:12:58 crc kubenswrapper[4869]: I1001 17:12:58.164646 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-pq7nn"] Oct 01 17:12:58 crc kubenswrapper[4869]: I1001 17:12:58.169104 4869 scope.go:117] "RemoveContainer" containerID="e2c8b92051550ea6a483b27fe19e744b33f615b467cfd36a28dd43fc0c5e2f36" Oct 01 17:12:58 crc kubenswrapper[4869]: I1001 17:12:58.175595 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-pq7nn"] Oct 01 17:12:58 crc kubenswrapper[4869]: I1001 17:12:58.581215 4869 scope.go:117] "RemoveContainer" containerID="1c17c6f6d4b5b7ea03383059037e4e76a83710badfded0c48fa96960660db929" Oct 01 17:12:58 crc kubenswrapper[4869]: E1001 17:12:58.581714 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 17:12:59 crc kubenswrapper[4869]: I1001 17:12:59.139516 4869 generic.go:334] "Generic (PLEG): container finished" podID="cf744061-ef80-421f-bb57-fa268c5f60fb" containerID="7ddc4f05391379cdc78fd411d6dc1ebbad88ff4f0113de4145f437a028d2dc39" exitCode=0 Oct 01 17:12:59 crc kubenswrapper[4869]: I1001 17:12:59.139806 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qxdbx" event={"ID":"cf744061-ef80-421f-bb57-fa268c5f60fb","Type":"ContainerDied","Data":"7ddc4f05391379cdc78fd411d6dc1ebbad88ff4f0113de4145f437a028d2dc39"} Oct 01 17:12:59 crc kubenswrapper[4869]: I1001 17:12:59.594375 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f2f6cf7-4b6c-4469-a3c5-32fad5e2bf93" path="/var/lib/kubelet/pods/8f2f6cf7-4b6c-4469-a3c5-32fad5e2bf93/volumes" Oct 01 17:13:01 crc kubenswrapper[4869]: I1001 17:13:01.173995 4869 generic.go:334] "Generic (PLEG): container finished" podID="cf744061-ef80-421f-bb57-fa268c5f60fb" containerID="fb0af226eed09262a4e2f95bf6b6b178f9439d4d58e77620bbe709d299c8bda1" exitCode=0 Oct 01 17:13:01 crc kubenswrapper[4869]: I1001 17:13:01.174078 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qxdbx" event={"ID":"cf744061-ef80-421f-bb57-fa268c5f60fb","Type":"ContainerDied","Data":"fb0af226eed09262a4e2f95bf6b6b178f9439d4d58e77620bbe709d299c8bda1"} Oct 01 17:13:03 crc kubenswrapper[4869]: I1001 17:13:03.195511 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qxdbx" event={"ID":"cf744061-ef80-421f-bb57-fa268c5f60fb","Type":"ContainerStarted","Data":"6a7108c7619b05c6d35e97fff26d204ced47bf5fc774f52fd6e533a0f4115d3d"} Oct 01 17:13:03 crc kubenswrapper[4869]: I1001 17:13:03.219091 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-qxdbx" podStartSLOduration=7.404844658 podStartE2EDuration="10.219070692s" podCreationTimestamp="2025-10-01 17:12:53 +0000 UTC" firstStartedPulling="2025-10-01 17:12:59.141762622 +0000 UTC m=+7688.288605738" lastFinishedPulling="2025-10-01 17:13:01.955988646 +0000 UTC m=+7691.102831772" observedRunningTime="2025-10-01 17:13:03.209775987 +0000 UTC m=+7692.356619103" watchObservedRunningTime="2025-10-01 17:13:03.219070692 +0000 UTC m=+7692.365913808" Oct 01 17:13:03 crc kubenswrapper[4869]: I1001 17:13:03.945660 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-qxdbx" Oct 01 17:13:03 crc kubenswrapper[4869]: I1001 17:13:03.946009 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-qxdbx" Oct 01 17:13:04 crc kubenswrapper[4869]: I1001 17:13:04.008852 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-qxdbx" Oct 01 17:13:09 crc kubenswrapper[4869]: I1001 17:13:09.582806 4869 scope.go:117] "RemoveContainer" containerID="1c17c6f6d4b5b7ea03383059037e4e76a83710badfded0c48fa96960660db929" Oct 01 17:13:09 crc kubenswrapper[4869]: E1001 17:13:09.583643 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 
5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 17:13:14 crc kubenswrapper[4869]: I1001 17:13:14.001551 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-qxdbx" Oct 01 17:13:14 crc kubenswrapper[4869]: I1001 17:13:14.066758 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-qxdbx"] Oct 01 17:13:14 crc kubenswrapper[4869]: I1001 17:13:14.325651 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-qxdbx" podUID="cf744061-ef80-421f-bb57-fa268c5f60fb" containerName="registry-server" containerID="cri-o://6a7108c7619b05c6d35e97fff26d204ced47bf5fc774f52fd6e533a0f4115d3d" gracePeriod=2 Oct 01 17:13:14 crc kubenswrapper[4869]: I1001 17:13:14.731175 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-qxdbx" Oct 01 17:13:14 crc kubenswrapper[4869]: I1001 17:13:14.885513 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cf744061-ef80-421f-bb57-fa268c5f60fb-utilities\") pod \"cf744061-ef80-421f-bb57-fa268c5f60fb\" (UID: \"cf744061-ef80-421f-bb57-fa268c5f60fb\") " Oct 01 17:13:14 crc kubenswrapper[4869]: I1001 17:13:14.885922 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sddst\" (UniqueName: \"kubernetes.io/projected/cf744061-ef80-421f-bb57-fa268c5f60fb-kube-api-access-sddst\") pod \"cf744061-ef80-421f-bb57-fa268c5f60fb\" (UID: \"cf744061-ef80-421f-bb57-fa268c5f60fb\") " Oct 01 17:13:14 crc kubenswrapper[4869]: I1001 17:13:14.886011 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cf744061-ef80-421f-bb57-fa268c5f60fb-catalog-content\") pod \"cf744061-ef80-421f-bb57-fa268c5f60fb\" (UID: \"cf744061-ef80-421f-bb57-fa268c5f60fb\") " Oct 01 17:13:14 crc kubenswrapper[4869]: I1001 17:13:14.886599 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cf744061-ef80-421f-bb57-fa268c5f60fb-utilities" (OuterVolumeSpecName: "utilities") pod "cf744061-ef80-421f-bb57-fa268c5f60fb" (UID: "cf744061-ef80-421f-bb57-fa268c5f60fb"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 17:13:14 crc kubenswrapper[4869]: I1001 17:13:14.891906 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cf744061-ef80-421f-bb57-fa268c5f60fb-kube-api-access-sddst" (OuterVolumeSpecName: "kube-api-access-sddst") pod "cf744061-ef80-421f-bb57-fa268c5f60fb" (UID: "cf744061-ef80-421f-bb57-fa268c5f60fb"). InnerVolumeSpecName "kube-api-access-sddst". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 17:13:14 crc kubenswrapper[4869]: I1001 17:13:14.934563 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cf744061-ef80-421f-bb57-fa268c5f60fb-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "cf744061-ef80-421f-bb57-fa268c5f60fb" (UID: "cf744061-ef80-421f-bb57-fa268c5f60fb"). 
InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 17:13:14 crc kubenswrapper[4869]: I1001 17:13:14.988431 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sddst\" (UniqueName: \"kubernetes.io/projected/cf744061-ef80-421f-bb57-fa268c5f60fb-kube-api-access-sddst\") on node \"crc\" DevicePath \"\"" Oct 01 17:13:14 crc kubenswrapper[4869]: I1001 17:13:14.988665 4869 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cf744061-ef80-421f-bb57-fa268c5f60fb-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 17:13:14 crc kubenswrapper[4869]: I1001 17:13:14.988674 4869 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cf744061-ef80-421f-bb57-fa268c5f60fb-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 17:13:15 crc kubenswrapper[4869]: I1001 17:13:15.337326 4869 generic.go:334] "Generic (PLEG): container finished" podID="cf744061-ef80-421f-bb57-fa268c5f60fb" containerID="6a7108c7619b05c6d35e97fff26d204ced47bf5fc774f52fd6e533a0f4115d3d" exitCode=0 Oct 01 17:13:15 crc kubenswrapper[4869]: I1001 17:13:15.337397 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qxdbx" event={"ID":"cf744061-ef80-421f-bb57-fa268c5f60fb","Type":"ContainerDied","Data":"6a7108c7619b05c6d35e97fff26d204ced47bf5fc774f52fd6e533a0f4115d3d"} Oct 01 17:13:15 crc kubenswrapper[4869]: I1001 17:13:15.337430 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qxdbx" event={"ID":"cf744061-ef80-421f-bb57-fa268c5f60fb","Type":"ContainerDied","Data":"d9c2d299fdc5dab64bf78394ae547dfa32a9b0d433fe94f3caeb6106bc309ba2"} Oct 01 17:13:15 crc kubenswrapper[4869]: I1001 17:13:15.337450 4869 scope.go:117] "RemoveContainer" containerID="6a7108c7619b05c6d35e97fff26d204ced47bf5fc774f52fd6e533a0f4115d3d" Oct 01 17:13:15 crc kubenswrapper[4869]: I1001 17:13:15.337619 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-qxdbx" Oct 01 17:13:15 crc kubenswrapper[4869]: I1001 17:13:15.341467 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizontest-tests-horizontest" event={"ID":"e28d6c91-53b4-4c43-885f-4eb32039fb5c","Type":"ContainerStarted","Data":"d0aa73b14681879d9527d5585f584e4ff7b196821dbc4f521a7655aa6d4dd1f1"} Oct 01 17:13:15 crc kubenswrapper[4869]: I1001 17:13:15.373513 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizontest-tests-horizontest" podStartSLOduration=3.640376951 podStartE2EDuration="36.373493165s" podCreationTimestamp="2025-10-01 17:12:39 +0000 UTC" firstStartedPulling="2025-10-01 17:12:41.419028599 +0000 UTC m=+7670.565871745" lastFinishedPulling="2025-10-01 17:13:14.152144843 +0000 UTC m=+7703.298987959" observedRunningTime="2025-10-01 17:13:15.360950539 +0000 UTC m=+7704.507793665" watchObservedRunningTime="2025-10-01 17:13:15.373493165 +0000 UTC m=+7704.520336281" Oct 01 17:13:15 crc kubenswrapper[4869]: I1001 17:13:15.379315 4869 scope.go:117] "RemoveContainer" containerID="fb0af226eed09262a4e2f95bf6b6b178f9439d4d58e77620bbe709d299c8bda1" Oct 01 17:13:15 crc kubenswrapper[4869]: I1001 17:13:15.396918 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-qxdbx"] Oct 01 17:13:15 crc kubenswrapper[4869]: I1001 17:13:15.408530 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-qxdbx"] Oct 01 17:13:15 crc kubenswrapper[4869]: I1001 17:13:15.432740 4869 scope.go:117] "RemoveContainer" containerID="7ddc4f05391379cdc78fd411d6dc1ebbad88ff4f0113de4145f437a028d2dc39" Oct 01 17:13:15 crc kubenswrapper[4869]: I1001 17:13:15.464561 4869 scope.go:117] "RemoveContainer" containerID="6a7108c7619b05c6d35e97fff26d204ced47bf5fc774f52fd6e533a0f4115d3d" Oct 01 17:13:15 crc kubenswrapper[4869]: E1001 17:13:15.465337 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6a7108c7619b05c6d35e97fff26d204ced47bf5fc774f52fd6e533a0f4115d3d\": container with ID starting with 6a7108c7619b05c6d35e97fff26d204ced47bf5fc774f52fd6e533a0f4115d3d not found: ID does not exist" containerID="6a7108c7619b05c6d35e97fff26d204ced47bf5fc774f52fd6e533a0f4115d3d" Oct 01 17:13:15 crc kubenswrapper[4869]: I1001 17:13:15.465396 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6a7108c7619b05c6d35e97fff26d204ced47bf5fc774f52fd6e533a0f4115d3d"} err="failed to get container status \"6a7108c7619b05c6d35e97fff26d204ced47bf5fc774f52fd6e533a0f4115d3d\": rpc error: code = NotFound desc = could not find container \"6a7108c7619b05c6d35e97fff26d204ced47bf5fc774f52fd6e533a0f4115d3d\": container with ID starting with 6a7108c7619b05c6d35e97fff26d204ced47bf5fc774f52fd6e533a0f4115d3d not found: ID does not exist" Oct 01 17:13:15 crc kubenswrapper[4869]: I1001 17:13:15.465432 4869 scope.go:117] "RemoveContainer" containerID="fb0af226eed09262a4e2f95bf6b6b178f9439d4d58e77620bbe709d299c8bda1" Oct 01 17:13:15 crc kubenswrapper[4869]: E1001 17:13:15.466548 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fb0af226eed09262a4e2f95bf6b6b178f9439d4d58e77620bbe709d299c8bda1\": container with ID starting with fb0af226eed09262a4e2f95bf6b6b178f9439d4d58e77620bbe709d299c8bda1 not found: ID does not exist" 
containerID="fb0af226eed09262a4e2f95bf6b6b178f9439d4d58e77620bbe709d299c8bda1" Oct 01 17:13:15 crc kubenswrapper[4869]: I1001 17:13:15.466578 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fb0af226eed09262a4e2f95bf6b6b178f9439d4d58e77620bbe709d299c8bda1"} err="failed to get container status \"fb0af226eed09262a4e2f95bf6b6b178f9439d4d58e77620bbe709d299c8bda1\": rpc error: code = NotFound desc = could not find container \"fb0af226eed09262a4e2f95bf6b6b178f9439d4d58e77620bbe709d299c8bda1\": container with ID starting with fb0af226eed09262a4e2f95bf6b6b178f9439d4d58e77620bbe709d299c8bda1 not found: ID does not exist" Oct 01 17:13:15 crc kubenswrapper[4869]: I1001 17:13:15.466593 4869 scope.go:117] "RemoveContainer" containerID="7ddc4f05391379cdc78fd411d6dc1ebbad88ff4f0113de4145f437a028d2dc39" Oct 01 17:13:15 crc kubenswrapper[4869]: E1001 17:13:15.466990 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7ddc4f05391379cdc78fd411d6dc1ebbad88ff4f0113de4145f437a028d2dc39\": container with ID starting with 7ddc4f05391379cdc78fd411d6dc1ebbad88ff4f0113de4145f437a028d2dc39 not found: ID does not exist" containerID="7ddc4f05391379cdc78fd411d6dc1ebbad88ff4f0113de4145f437a028d2dc39" Oct 01 17:13:15 crc kubenswrapper[4869]: I1001 17:13:15.467044 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7ddc4f05391379cdc78fd411d6dc1ebbad88ff4f0113de4145f437a028d2dc39"} err="failed to get container status \"7ddc4f05391379cdc78fd411d6dc1ebbad88ff4f0113de4145f437a028d2dc39\": rpc error: code = NotFound desc = could not find container \"7ddc4f05391379cdc78fd411d6dc1ebbad88ff4f0113de4145f437a028d2dc39\": container with ID starting with 7ddc4f05391379cdc78fd411d6dc1ebbad88ff4f0113de4145f437a028d2dc39 not found: ID does not exist" Oct 01 17:13:15 crc kubenswrapper[4869]: I1001 17:13:15.603033 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cf744061-ef80-421f-bb57-fa268c5f60fb" path="/var/lib/kubelet/pods/cf744061-ef80-421f-bb57-fa268c5f60fb/volumes" Oct 01 17:13:21 crc kubenswrapper[4869]: I1001 17:13:21.589779 4869 scope.go:117] "RemoveContainer" containerID="1c17c6f6d4b5b7ea03383059037e4e76a83710badfded0c48fa96960660db929" Oct 01 17:13:21 crc kubenswrapper[4869]: E1001 17:13:21.590848 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 17:13:32 crc kubenswrapper[4869]: I1001 17:13:32.581685 4869 scope.go:117] "RemoveContainer" containerID="1c17c6f6d4b5b7ea03383059037e4e76a83710badfded0c48fa96960660db929" Oct 01 17:13:32 crc kubenswrapper[4869]: E1001 17:13:32.582883 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 17:13:44 crc kubenswrapper[4869]: I1001 
17:13:44.581000 4869 scope.go:117] "RemoveContainer" containerID="1c17c6f6d4b5b7ea03383059037e4e76a83710badfded0c48fa96960660db929" Oct 01 17:13:44 crc kubenswrapper[4869]: E1001 17:13:44.581878 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 17:13:55 crc kubenswrapper[4869]: I1001 17:13:55.581323 4869 scope.go:117] "RemoveContainer" containerID="1c17c6f6d4b5b7ea03383059037e4e76a83710badfded0c48fa96960660db929" Oct 01 17:13:55 crc kubenswrapper[4869]: E1001 17:13:55.582126 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 17:14:09 crc kubenswrapper[4869]: I1001 17:14:09.585139 4869 scope.go:117] "RemoveContainer" containerID="1c17c6f6d4b5b7ea03383059037e4e76a83710badfded0c48fa96960660db929" Oct 01 17:14:09 crc kubenswrapper[4869]: E1001 17:14:09.585872 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 17:14:20 crc kubenswrapper[4869]: I1001 17:14:20.584741 4869 scope.go:117] "RemoveContainer" containerID="1c17c6f6d4b5b7ea03383059037e4e76a83710badfded0c48fa96960660db929" Oct 01 17:14:20 crc kubenswrapper[4869]: E1001 17:14:20.585927 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 17:14:33 crc kubenswrapper[4869]: I1001 17:14:33.582233 4869 scope.go:117] "RemoveContainer" containerID="1c17c6f6d4b5b7ea03383059037e4e76a83710badfded0c48fa96960660db929" Oct 01 17:14:33 crc kubenswrapper[4869]: E1001 17:14:33.583249 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 17:14:47 crc kubenswrapper[4869]: I1001 17:14:47.580993 4869 scope.go:117] "RemoveContainer" containerID="1c17c6f6d4b5b7ea03383059037e4e76a83710badfded0c48fa96960660db929" Oct 01 17:14:47 crc kubenswrapper[4869]: E1001 17:14:47.581798 
4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 17:15:00 crc kubenswrapper[4869]: I1001 17:15:00.162151 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29322315-sfjbz"] Oct 01 17:15:00 crc kubenswrapper[4869]: E1001 17:15:00.163126 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf744061-ef80-421f-bb57-fa268c5f60fb" containerName="extract-content" Oct 01 17:15:00 crc kubenswrapper[4869]: I1001 17:15:00.163142 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf744061-ef80-421f-bb57-fa268c5f60fb" containerName="extract-content" Oct 01 17:15:00 crc kubenswrapper[4869]: E1001 17:15:00.163155 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8f2f6cf7-4b6c-4469-a3c5-32fad5e2bf93" containerName="registry-server" Oct 01 17:15:00 crc kubenswrapper[4869]: I1001 17:15:00.163162 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="8f2f6cf7-4b6c-4469-a3c5-32fad5e2bf93" containerName="registry-server" Oct 01 17:15:00 crc kubenswrapper[4869]: E1001 17:15:00.163173 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8f2f6cf7-4b6c-4469-a3c5-32fad5e2bf93" containerName="extract-content" Oct 01 17:15:00 crc kubenswrapper[4869]: I1001 17:15:00.163179 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="8f2f6cf7-4b6c-4469-a3c5-32fad5e2bf93" containerName="extract-content" Oct 01 17:15:00 crc kubenswrapper[4869]: E1001 17:15:00.163198 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf744061-ef80-421f-bb57-fa268c5f60fb" containerName="registry-server" Oct 01 17:15:00 crc kubenswrapper[4869]: I1001 17:15:00.163204 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf744061-ef80-421f-bb57-fa268c5f60fb" containerName="registry-server" Oct 01 17:15:00 crc kubenswrapper[4869]: E1001 17:15:00.163211 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf744061-ef80-421f-bb57-fa268c5f60fb" containerName="extract-utilities" Oct 01 17:15:00 crc kubenswrapper[4869]: I1001 17:15:00.163244 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf744061-ef80-421f-bb57-fa268c5f60fb" containerName="extract-utilities" Oct 01 17:15:00 crc kubenswrapper[4869]: E1001 17:15:00.163318 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8f2f6cf7-4b6c-4469-a3c5-32fad5e2bf93" containerName="extract-utilities" Oct 01 17:15:00 crc kubenswrapper[4869]: I1001 17:15:00.163330 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="8f2f6cf7-4b6c-4469-a3c5-32fad5e2bf93" containerName="extract-utilities" Oct 01 17:15:00 crc kubenswrapper[4869]: I1001 17:15:00.163545 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="cf744061-ef80-421f-bb57-fa268c5f60fb" containerName="registry-server" Oct 01 17:15:00 crc kubenswrapper[4869]: I1001 17:15:00.163568 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="8f2f6cf7-4b6c-4469-a3c5-32fad5e2bf93" containerName="registry-server" Oct 01 17:15:00 crc kubenswrapper[4869]: I1001 17:15:00.164285 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29322315-sfjbz" Oct 01 17:15:00 crc kubenswrapper[4869]: I1001 17:15:00.169116 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 01 17:15:00 crc kubenswrapper[4869]: I1001 17:15:00.169125 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 01 17:15:00 crc kubenswrapper[4869]: I1001 17:15:00.183465 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29322315-sfjbz"] Oct 01 17:15:00 crc kubenswrapper[4869]: I1001 17:15:00.231966 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v2dsw\" (UniqueName: \"kubernetes.io/projected/a45793ed-cfde-4732-bfcc-3d83bc4cef0e-kube-api-access-v2dsw\") pod \"collect-profiles-29322315-sfjbz\" (UID: \"a45793ed-cfde-4732-bfcc-3d83bc4cef0e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322315-sfjbz" Oct 01 17:15:00 crc kubenswrapper[4869]: I1001 17:15:00.232066 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a45793ed-cfde-4732-bfcc-3d83bc4cef0e-secret-volume\") pod \"collect-profiles-29322315-sfjbz\" (UID: \"a45793ed-cfde-4732-bfcc-3d83bc4cef0e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322315-sfjbz" Oct 01 17:15:00 crc kubenswrapper[4869]: I1001 17:15:00.232152 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a45793ed-cfde-4732-bfcc-3d83bc4cef0e-config-volume\") pod \"collect-profiles-29322315-sfjbz\" (UID: \"a45793ed-cfde-4732-bfcc-3d83bc4cef0e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322315-sfjbz" Oct 01 17:15:00 crc kubenswrapper[4869]: I1001 17:15:00.334529 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a45793ed-cfde-4732-bfcc-3d83bc4cef0e-config-volume\") pod \"collect-profiles-29322315-sfjbz\" (UID: \"a45793ed-cfde-4732-bfcc-3d83bc4cef0e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322315-sfjbz" Oct 01 17:15:00 crc kubenswrapper[4869]: I1001 17:15:00.334846 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v2dsw\" (UniqueName: \"kubernetes.io/projected/a45793ed-cfde-4732-bfcc-3d83bc4cef0e-kube-api-access-v2dsw\") pod \"collect-profiles-29322315-sfjbz\" (UID: \"a45793ed-cfde-4732-bfcc-3d83bc4cef0e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322315-sfjbz" Oct 01 17:15:00 crc kubenswrapper[4869]: I1001 17:15:00.334972 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a45793ed-cfde-4732-bfcc-3d83bc4cef0e-secret-volume\") pod \"collect-profiles-29322315-sfjbz\" (UID: \"a45793ed-cfde-4732-bfcc-3d83bc4cef0e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322315-sfjbz" Oct 01 17:15:00 crc kubenswrapper[4869]: I1001 17:15:00.338479 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a45793ed-cfde-4732-bfcc-3d83bc4cef0e-config-volume\") pod 
\"collect-profiles-29322315-sfjbz\" (UID: \"a45793ed-cfde-4732-bfcc-3d83bc4cef0e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322315-sfjbz" Oct 01 17:15:00 crc kubenswrapper[4869]: I1001 17:15:00.355065 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a45793ed-cfde-4732-bfcc-3d83bc4cef0e-secret-volume\") pod \"collect-profiles-29322315-sfjbz\" (UID: \"a45793ed-cfde-4732-bfcc-3d83bc4cef0e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322315-sfjbz" Oct 01 17:15:00 crc kubenswrapper[4869]: I1001 17:15:00.356610 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v2dsw\" (UniqueName: \"kubernetes.io/projected/a45793ed-cfde-4732-bfcc-3d83bc4cef0e-kube-api-access-v2dsw\") pod \"collect-profiles-29322315-sfjbz\" (UID: \"a45793ed-cfde-4732-bfcc-3d83bc4cef0e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322315-sfjbz" Oct 01 17:15:00 crc kubenswrapper[4869]: I1001 17:15:00.498073 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29322315-sfjbz" Oct 01 17:15:00 crc kubenswrapper[4869]: I1001 17:15:00.998687 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29322315-sfjbz"] Oct 01 17:15:01 crc kubenswrapper[4869]: I1001 17:15:01.458359 4869 generic.go:334] "Generic (PLEG): container finished" podID="a45793ed-cfde-4732-bfcc-3d83bc4cef0e" containerID="9379edde19b5c5112a60e92d030ffa1167cbcb330ea57ee15fa6d5f8685eb214" exitCode=0 Oct 01 17:15:01 crc kubenswrapper[4869]: I1001 17:15:01.458750 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29322315-sfjbz" event={"ID":"a45793ed-cfde-4732-bfcc-3d83bc4cef0e","Type":"ContainerDied","Data":"9379edde19b5c5112a60e92d030ffa1167cbcb330ea57ee15fa6d5f8685eb214"} Oct 01 17:15:01 crc kubenswrapper[4869]: I1001 17:15:01.458781 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29322315-sfjbz" event={"ID":"a45793ed-cfde-4732-bfcc-3d83bc4cef0e","Type":"ContainerStarted","Data":"258e2bfb6d06c533f55477d7bebb158e53412841a12687b3118b902263b32069"} Oct 01 17:15:01 crc kubenswrapper[4869]: I1001 17:15:01.600963 4869 scope.go:117] "RemoveContainer" containerID="1c17c6f6d4b5b7ea03383059037e4e76a83710badfded0c48fa96960660db929" Oct 01 17:15:01 crc kubenswrapper[4869]: E1001 17:15:01.601677 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 17:15:02 crc kubenswrapper[4869]: I1001 17:15:02.866070 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29322315-sfjbz" Oct 01 17:15:02 crc kubenswrapper[4869]: I1001 17:15:02.892218 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a45793ed-cfde-4732-bfcc-3d83bc4cef0e-config-volume\") pod \"a45793ed-cfde-4732-bfcc-3d83bc4cef0e\" (UID: \"a45793ed-cfde-4732-bfcc-3d83bc4cef0e\") " Oct 01 17:15:02 crc kubenswrapper[4869]: I1001 17:15:02.892375 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a45793ed-cfde-4732-bfcc-3d83bc4cef0e-secret-volume\") pod \"a45793ed-cfde-4732-bfcc-3d83bc4cef0e\" (UID: \"a45793ed-cfde-4732-bfcc-3d83bc4cef0e\") " Oct 01 17:15:02 crc kubenswrapper[4869]: I1001 17:15:02.892458 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v2dsw\" (UniqueName: \"kubernetes.io/projected/a45793ed-cfde-4732-bfcc-3d83bc4cef0e-kube-api-access-v2dsw\") pod \"a45793ed-cfde-4732-bfcc-3d83bc4cef0e\" (UID: \"a45793ed-cfde-4732-bfcc-3d83bc4cef0e\") " Oct 01 17:15:02 crc kubenswrapper[4869]: I1001 17:15:02.893912 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a45793ed-cfde-4732-bfcc-3d83bc4cef0e-config-volume" (OuterVolumeSpecName: "config-volume") pod "a45793ed-cfde-4732-bfcc-3d83bc4cef0e" (UID: "a45793ed-cfde-4732-bfcc-3d83bc4cef0e"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 17:15:02 crc kubenswrapper[4869]: I1001 17:15:02.924946 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a45793ed-cfde-4732-bfcc-3d83bc4cef0e-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "a45793ed-cfde-4732-bfcc-3d83bc4cef0e" (UID: "a45793ed-cfde-4732-bfcc-3d83bc4cef0e"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 17:15:02 crc kubenswrapper[4869]: I1001 17:15:02.925167 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a45793ed-cfde-4732-bfcc-3d83bc4cef0e-kube-api-access-v2dsw" (OuterVolumeSpecName: "kube-api-access-v2dsw") pod "a45793ed-cfde-4732-bfcc-3d83bc4cef0e" (UID: "a45793ed-cfde-4732-bfcc-3d83bc4cef0e"). InnerVolumeSpecName "kube-api-access-v2dsw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 17:15:02 crc kubenswrapper[4869]: I1001 17:15:02.996182 4869 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a45793ed-cfde-4732-bfcc-3d83bc4cef0e-config-volume\") on node \"crc\" DevicePath \"\"" Oct 01 17:15:02 crc kubenswrapper[4869]: I1001 17:15:02.996326 4869 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a45793ed-cfde-4732-bfcc-3d83bc4cef0e-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 01 17:15:02 crc kubenswrapper[4869]: I1001 17:15:02.996354 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v2dsw\" (UniqueName: \"kubernetes.io/projected/a45793ed-cfde-4732-bfcc-3d83bc4cef0e-kube-api-access-v2dsw\") on node \"crc\" DevicePath \"\"" Oct 01 17:15:03 crc kubenswrapper[4869]: I1001 17:15:03.481734 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29322315-sfjbz" event={"ID":"a45793ed-cfde-4732-bfcc-3d83bc4cef0e","Type":"ContainerDied","Data":"258e2bfb6d06c533f55477d7bebb158e53412841a12687b3118b902263b32069"} Oct 01 17:15:03 crc kubenswrapper[4869]: I1001 17:15:03.481793 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="258e2bfb6d06c533f55477d7bebb158e53412841a12687b3118b902263b32069" Oct 01 17:15:03 crc kubenswrapper[4869]: I1001 17:15:03.481834 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29322315-sfjbz" Oct 01 17:15:03 crc kubenswrapper[4869]: I1001 17:15:03.960443 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29322270-pjv6n"] Oct 01 17:15:03 crc kubenswrapper[4869]: I1001 17:15:03.972794 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29322270-pjv6n"] Oct 01 17:15:05 crc kubenswrapper[4869]: I1001 17:15:05.593883 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="495ca456-ea2e-47b7-9b98-307bc08df870" path="/var/lib/kubelet/pods/495ca456-ea2e-47b7-9b98-307bc08df870/volumes" Oct 01 17:15:15 crc kubenswrapper[4869]: I1001 17:15:15.581633 4869 scope.go:117] "RemoveContainer" containerID="1c17c6f6d4b5b7ea03383059037e4e76a83710badfded0c48fa96960660db929" Oct 01 17:15:15 crc kubenswrapper[4869]: E1001 17:15:15.582539 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 17:15:17 crc kubenswrapper[4869]: I1001 17:15:17.630890 4869 generic.go:334] "Generic (PLEG): container finished" podID="e28d6c91-53b4-4c43-885f-4eb32039fb5c" containerID="d0aa73b14681879d9527d5585f584e4ff7b196821dbc4f521a7655aa6d4dd1f1" exitCode=0 Oct 01 17:15:17 crc kubenswrapper[4869]: I1001 17:15:17.630987 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizontest-tests-horizontest" event={"ID":"e28d6c91-53b4-4c43-885f-4eb32039fb5c","Type":"ContainerDied","Data":"d0aa73b14681879d9527d5585f584e4ff7b196821dbc4f521a7655aa6d4dd1f1"} Oct 01 17:15:19 crc 
kubenswrapper[4869]: I1001 17:15:19.041016 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizontest-tests-horizontest" Oct 01 17:15:19 crc kubenswrapper[4869]: I1001 17:15:19.165544 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-logs\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"e28d6c91-53b4-4c43-885f-4eb32039fb5c\" (UID: \"e28d6c91-53b4-4c43-885f-4eb32039fb5c\") " Oct 01 17:15:19 crc kubenswrapper[4869]: I1001 17:15:19.165720 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dlfnz\" (UniqueName: \"kubernetes.io/projected/e28d6c91-53b4-4c43-885f-4eb32039fb5c-kube-api-access-dlfnz\") pod \"e28d6c91-53b4-4c43-885f-4eb32039fb5c\" (UID: \"e28d6c91-53b4-4c43-885f-4eb32039fb5c\") " Oct 01 17:15:19 crc kubenswrapper[4869]: I1001 17:15:19.165745 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-clouds-config\" (UniqueName: \"kubernetes.io/configmap/e28d6c91-53b4-4c43-885f-4eb32039fb5c-test-operator-clouds-config\") pod \"e28d6c91-53b4-4c43-885f-4eb32039fb5c\" (UID: \"e28d6c91-53b4-4c43-885f-4eb32039fb5c\") " Oct 01 17:15:19 crc kubenswrapper[4869]: I1001 17:15:19.165764 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/e28d6c91-53b4-4c43-885f-4eb32039fb5c-ceph\") pod \"e28d6c91-53b4-4c43-885f-4eb32039fb5c\" (UID: \"e28d6c91-53b4-4c43-885f-4eb32039fb5c\") " Oct 01 17:15:19 crc kubenswrapper[4869]: I1001 17:15:19.165885 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/e28d6c91-53b4-4c43-885f-4eb32039fb5c-test-operator-ephemeral-workdir\") pod \"e28d6c91-53b4-4c43-885f-4eb32039fb5c\" (UID: \"e28d6c91-53b4-4c43-885f-4eb32039fb5c\") " Oct 01 17:15:19 crc kubenswrapper[4869]: I1001 17:15:19.165922 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/e28d6c91-53b4-4c43-885f-4eb32039fb5c-ca-certs\") pod \"e28d6c91-53b4-4c43-885f-4eb32039fb5c\" (UID: \"e28d6c91-53b4-4c43-885f-4eb32039fb5c\") " Oct 01 17:15:19 crc kubenswrapper[4869]: I1001 17:15:19.165954 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/e28d6c91-53b4-4c43-885f-4eb32039fb5c-openstack-config-secret\") pod \"e28d6c91-53b4-4c43-885f-4eb32039fb5c\" (UID: \"e28d6c91-53b4-4c43-885f-4eb32039fb5c\") " Oct 01 17:15:19 crc kubenswrapper[4869]: I1001 17:15:19.165997 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/e28d6c91-53b4-4c43-885f-4eb32039fb5c-test-operator-ephemeral-temporary\") pod \"e28d6c91-53b4-4c43-885f-4eb32039fb5c\" (UID: \"e28d6c91-53b4-4c43-885f-4eb32039fb5c\") " Oct 01 17:15:19 crc kubenswrapper[4869]: I1001 17:15:19.167498 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e28d6c91-53b4-4c43-885f-4eb32039fb5c-test-operator-ephemeral-temporary" (OuterVolumeSpecName: "test-operator-ephemeral-temporary") pod "e28d6c91-53b4-4c43-885f-4eb32039fb5c" (UID: "e28d6c91-53b4-4c43-885f-4eb32039fb5c"). InnerVolumeSpecName "test-operator-ephemeral-temporary". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 17:15:19 crc kubenswrapper[4869]: I1001 17:15:19.171117 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage11-crc" (OuterVolumeSpecName: "test-operator-logs") pod "e28d6c91-53b4-4c43-885f-4eb32039fb5c" (UID: "e28d6c91-53b4-4c43-885f-4eb32039fb5c"). InnerVolumeSpecName "local-storage11-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 01 17:15:19 crc kubenswrapper[4869]: I1001 17:15:19.173774 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e28d6c91-53b4-4c43-885f-4eb32039fb5c-kube-api-access-dlfnz" (OuterVolumeSpecName: "kube-api-access-dlfnz") pod "e28d6c91-53b4-4c43-885f-4eb32039fb5c" (UID: "e28d6c91-53b4-4c43-885f-4eb32039fb5c"). InnerVolumeSpecName "kube-api-access-dlfnz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 17:15:19 crc kubenswrapper[4869]: I1001 17:15:19.176461 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e28d6c91-53b4-4c43-885f-4eb32039fb5c-ceph" (OuterVolumeSpecName: "ceph") pod "e28d6c91-53b4-4c43-885f-4eb32039fb5c" (UID: "e28d6c91-53b4-4c43-885f-4eb32039fb5c"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 17:15:19 crc kubenswrapper[4869]: I1001 17:15:19.226736 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e28d6c91-53b4-4c43-885f-4eb32039fb5c-test-operator-clouds-config" (OuterVolumeSpecName: "test-operator-clouds-config") pod "e28d6c91-53b4-4c43-885f-4eb32039fb5c" (UID: "e28d6c91-53b4-4c43-885f-4eb32039fb5c"). InnerVolumeSpecName "test-operator-clouds-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 17:15:19 crc kubenswrapper[4869]: I1001 17:15:19.228177 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e28d6c91-53b4-4c43-885f-4eb32039fb5c-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "e28d6c91-53b4-4c43-885f-4eb32039fb5c" (UID: "e28d6c91-53b4-4c43-885f-4eb32039fb5c"). InnerVolumeSpecName "openstack-config-secret". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 17:15:19 crc kubenswrapper[4869]: I1001 17:15:19.268010 4869 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/e28d6c91-53b4-4c43-885f-4eb32039fb5c-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Oct 01 17:15:19 crc kubenswrapper[4869]: I1001 17:15:19.268057 4869 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/e28d6c91-53b4-4c43-885f-4eb32039fb5c-test-operator-ephemeral-temporary\") on node \"crc\" DevicePath \"\"" Oct 01 17:15:19 crc kubenswrapper[4869]: I1001 17:15:19.268095 4869 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" " Oct 01 17:15:19 crc kubenswrapper[4869]: I1001 17:15:19.268113 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dlfnz\" (UniqueName: \"kubernetes.io/projected/e28d6c91-53b4-4c43-885f-4eb32039fb5c-kube-api-access-dlfnz\") on node \"crc\" DevicePath \"\"" Oct 01 17:15:19 crc kubenswrapper[4869]: I1001 17:15:19.268133 4869 reconciler_common.go:293] "Volume detached for volume \"test-operator-clouds-config\" (UniqueName: \"kubernetes.io/configmap/e28d6c91-53b4-4c43-885f-4eb32039fb5c-test-operator-clouds-config\") on node \"crc\" DevicePath \"\"" Oct 01 17:15:19 crc kubenswrapper[4869]: I1001 17:15:19.268150 4869 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/e28d6c91-53b4-4c43-885f-4eb32039fb5c-ceph\") on node \"crc\" DevicePath \"\"" Oct 01 17:15:19 crc kubenswrapper[4869]: I1001 17:15:19.278492 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e28d6c91-53b4-4c43-885f-4eb32039fb5c-ca-certs" (OuterVolumeSpecName: "ca-certs") pod "e28d6c91-53b4-4c43-885f-4eb32039fb5c" (UID: "e28d6c91-53b4-4c43-885f-4eb32039fb5c"). InnerVolumeSpecName "ca-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 17:15:19 crc kubenswrapper[4869]: I1001 17:15:19.312532 4869 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage11-crc" (UniqueName: "kubernetes.io/local-volume/local-storage11-crc") on node "crc" Oct 01 17:15:19 crc kubenswrapper[4869]: I1001 17:15:19.369991 4869 reconciler_common.go:293] "Volume detached for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" DevicePath \"\"" Oct 01 17:15:19 crc kubenswrapper[4869]: I1001 17:15:19.370017 4869 reconciler_common.go:293] "Volume detached for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/e28d6c91-53b4-4c43-885f-4eb32039fb5c-ca-certs\") on node \"crc\" DevicePath \"\"" Oct 01 17:15:19 crc kubenswrapper[4869]: I1001 17:15:19.393715 4869 scope.go:117] "RemoveContainer" containerID="bab5db0bd2f9e90691aebb0e21f37c930915052309ee2b4016701f0b6ac55e2c" Oct 01 17:15:19 crc kubenswrapper[4869]: I1001 17:15:19.428846 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e28d6c91-53b4-4c43-885f-4eb32039fb5c-test-operator-ephemeral-workdir" (OuterVolumeSpecName: "test-operator-ephemeral-workdir") pod "e28d6c91-53b4-4c43-885f-4eb32039fb5c" (UID: "e28d6c91-53b4-4c43-885f-4eb32039fb5c"). InnerVolumeSpecName "test-operator-ephemeral-workdir". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 17:15:19 crc kubenswrapper[4869]: I1001 17:15:19.471294 4869 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/e28d6c91-53b4-4c43-885f-4eb32039fb5c-test-operator-ephemeral-workdir\") on node \"crc\" DevicePath \"\"" Oct 01 17:15:19 crc kubenswrapper[4869]: I1001 17:15:19.652488 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizontest-tests-horizontest" event={"ID":"e28d6c91-53b4-4c43-885f-4eb32039fb5c","Type":"ContainerDied","Data":"6a39b512400e748f9834e86ce7a2cb9968215b92c99db26212134ae7e20227da"} Oct 01 17:15:19 crc kubenswrapper[4869]: I1001 17:15:19.652533 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6a39b512400e748f9834e86ce7a2cb9968215b92c99db26212134ae7e20227da" Oct 01 17:15:19 crc kubenswrapper[4869]: I1001 17:15:19.652543 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizontest-tests-horizontest" Oct 01 17:15:24 crc kubenswrapper[4869]: I1001 17:15:24.077071 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/test-operator-logs-pod-horizontest-horizontest-tests-horizontest"] Oct 01 17:15:24 crc kubenswrapper[4869]: E1001 17:15:24.078861 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e28d6c91-53b4-4c43-885f-4eb32039fb5c" containerName="horizontest-tests-horizontest" Oct 01 17:15:24 crc kubenswrapper[4869]: I1001 17:15:24.078885 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="e28d6c91-53b4-4c43-885f-4eb32039fb5c" containerName="horizontest-tests-horizontest" Oct 01 17:15:24 crc kubenswrapper[4869]: E1001 17:15:24.078957 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a45793ed-cfde-4732-bfcc-3d83bc4cef0e" containerName="collect-profiles" Oct 01 17:15:24 crc kubenswrapper[4869]: I1001 17:15:24.078968 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="a45793ed-cfde-4732-bfcc-3d83bc4cef0e" containerName="collect-profiles" Oct 01 17:15:24 crc kubenswrapper[4869]: I1001 17:15:24.079235 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="e28d6c91-53b4-4c43-885f-4eb32039fb5c" containerName="horizontest-tests-horizontest" Oct 01 17:15:24 crc kubenswrapper[4869]: I1001 17:15:24.079298 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="a45793ed-cfde-4732-bfcc-3d83bc4cef0e" containerName="collect-profiles" Oct 01 17:15:24 crc kubenswrapper[4869]: I1001 17:15:24.080564 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/test-operator-logs-pod-horizontest-horizontest-tests-horizontest" Oct 01 17:15:24 crc kubenswrapper[4869]: I1001 17:15:24.087084 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-horizontest-horizontest-tests-horizontest"] Oct 01 17:15:24 crc kubenswrapper[4869]: I1001 17:15:24.179914 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"test-operator-logs-pod-horizontest-horizontest-tests-horizontest\" (UID: \"5e00cfe0-d068-4ef3-9f8c-b50b2b1045f5\") " pod="openstack/test-operator-logs-pod-horizontest-horizontest-tests-horizontest" Oct 01 17:15:24 crc kubenswrapper[4869]: I1001 17:15:24.180077 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cnzff\" (UniqueName: \"kubernetes.io/projected/5e00cfe0-d068-4ef3-9f8c-b50b2b1045f5-kube-api-access-cnzff\") pod \"test-operator-logs-pod-horizontest-horizontest-tests-horizontest\" (UID: \"5e00cfe0-d068-4ef3-9f8c-b50b2b1045f5\") " pod="openstack/test-operator-logs-pod-horizontest-horizontest-tests-horizontest" Oct 01 17:15:24 crc kubenswrapper[4869]: I1001 17:15:24.281823 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cnzff\" (UniqueName: \"kubernetes.io/projected/5e00cfe0-d068-4ef3-9f8c-b50b2b1045f5-kube-api-access-cnzff\") pod \"test-operator-logs-pod-horizontest-horizontest-tests-horizontest\" (UID: \"5e00cfe0-d068-4ef3-9f8c-b50b2b1045f5\") " pod="openstack/test-operator-logs-pod-horizontest-horizontest-tests-horizontest" Oct 01 17:15:24 crc kubenswrapper[4869]: I1001 17:15:24.281936 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"test-operator-logs-pod-horizontest-horizontest-tests-horizontest\" (UID: \"5e00cfe0-d068-4ef3-9f8c-b50b2b1045f5\") " pod="openstack/test-operator-logs-pod-horizontest-horizontest-tests-horizontest" Oct 01 17:15:24 crc kubenswrapper[4869]: I1001 17:15:24.282379 4869 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"test-operator-logs-pod-horizontest-horizontest-tests-horizontest\" (UID: \"5e00cfe0-d068-4ef3-9f8c-b50b2b1045f5\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/test-operator-logs-pod-horizontest-horizontest-tests-horizontest" Oct 01 17:15:24 crc kubenswrapper[4869]: I1001 17:15:24.309317 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cnzff\" (UniqueName: \"kubernetes.io/projected/5e00cfe0-d068-4ef3-9f8c-b50b2b1045f5-kube-api-access-cnzff\") pod \"test-operator-logs-pod-horizontest-horizontest-tests-horizontest\" (UID: \"5e00cfe0-d068-4ef3-9f8c-b50b2b1045f5\") " pod="openstack/test-operator-logs-pod-horizontest-horizontest-tests-horizontest" Oct 01 17:15:24 crc kubenswrapper[4869]: I1001 17:15:24.311135 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"test-operator-logs-pod-horizontest-horizontest-tests-horizontest\" (UID: \"5e00cfe0-d068-4ef3-9f8c-b50b2b1045f5\") " pod="openstack/test-operator-logs-pod-horizontest-horizontest-tests-horizontest" Oct 01 
17:15:24 crc kubenswrapper[4869]: I1001 17:15:24.457799 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/test-operator-logs-pod-horizontest-horizontest-tests-horizontest" Oct 01 17:15:24 crc kubenswrapper[4869]: E1001 17:15:24.457974 4869 kubelet_pods.go:538] "Hostname for pod was too long, truncated it" podName="test-operator-logs-pod-horizontest-horizontest-tests-horizontest" hostnameMaxLen=63 truncatedHostname="test-operator-logs-pod-horizontest-horizontest-tests-horizontes" Oct 01 17:15:24 crc kubenswrapper[4869]: I1001 17:15:24.933798 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-horizontest-horizontest-tests-horizontest"] Oct 01 17:15:24 crc kubenswrapper[4869]: E1001 17:15:24.939483 4869 kubelet_pods.go:538] "Hostname for pod was too long, truncated it" podName="test-operator-logs-pod-horizontest-horizontest-tests-horizontest" hostnameMaxLen=63 truncatedHostname="test-operator-logs-pod-horizontest-horizontest-tests-horizontes" Oct 01 17:15:25 crc kubenswrapper[4869]: E1001 17:15:25.638585 4869 kubelet_pods.go:538] "Hostname for pod was too long, truncated it" podName="test-operator-logs-pod-horizontest-horizontest-tests-horizontest" hostnameMaxLen=63 truncatedHostname="test-operator-logs-pod-horizontest-horizontest-tests-horizontes" Oct 01 17:15:25 crc kubenswrapper[4869]: I1001 17:15:25.722766 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-horizontest-horizontest-tests-horizontest" event={"ID":"5e00cfe0-d068-4ef3-9f8c-b50b2b1045f5","Type":"ContainerStarted","Data":"6b2f0d51dac8025c48ed72944b860cdf868d0e7d92af9d5b5767b8e29b4714da"} Oct 01 17:15:26 crc kubenswrapper[4869]: I1001 17:15:26.736828 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-horizontest-horizontest-tests-horizontest" event={"ID":"5e00cfe0-d068-4ef3-9f8c-b50b2b1045f5","Type":"ContainerStarted","Data":"2c7dbdedd7740af79d22691f38329fc23d2985069c3433632002657fa3fcc14d"} Oct 01 17:15:26 crc kubenswrapper[4869]: E1001 17:15:26.737621 4869 kubelet_pods.go:538] "Hostname for pod was too long, truncated it" podName="test-operator-logs-pod-horizontest-horizontest-tests-horizontest" hostnameMaxLen=63 truncatedHostname="test-operator-logs-pod-horizontest-horizontest-tests-horizontes" Oct 01 17:15:26 crc kubenswrapper[4869]: I1001 17:15:26.767794 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/test-operator-logs-pod-horizontest-horizontest-tests-horizontest" podStartSLOduration=2.070015064 podStartE2EDuration="2.767771214s" podCreationTimestamp="2025-10-01 17:15:24 +0000 UTC" firstStartedPulling="2025-10-01 17:15:24.940724837 +0000 UTC m=+7834.087567963" lastFinishedPulling="2025-10-01 17:15:25.638480957 +0000 UTC m=+7834.785324113" observedRunningTime="2025-10-01 17:15:26.755228917 +0000 UTC m=+7835.902072033" watchObservedRunningTime="2025-10-01 17:15:26.767771214 +0000 UTC m=+7835.914614330" Oct 01 17:15:27 crc kubenswrapper[4869]: E1001 17:15:27.747823 4869 kubelet_pods.go:538] "Hostname for pod was too long, truncated it" podName="test-operator-logs-pod-horizontest-horizontest-tests-horizontest" hostnameMaxLen=63 truncatedHostname="test-operator-logs-pod-horizontest-horizontest-tests-horizontes" Oct 01 17:15:28 crc kubenswrapper[4869]: I1001 17:15:28.581132 4869 scope.go:117] "RemoveContainer" containerID="1c17c6f6d4b5b7ea03383059037e4e76a83710badfded0c48fa96960660db929" Oct 01 17:15:28 crc 
kubenswrapper[4869]: E1001 17:15:28.581700 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 17:15:43 crc kubenswrapper[4869]: I1001 17:15:43.581063 4869 scope.go:117] "RemoveContainer" containerID="1c17c6f6d4b5b7ea03383059037e4e76a83710badfded0c48fa96960660db929" Oct 01 17:15:43 crc kubenswrapper[4869]: E1001 17:15:43.582115 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 17:15:56 crc kubenswrapper[4869]: I1001 17:15:56.581978 4869 scope.go:117] "RemoveContainer" containerID="1c17c6f6d4b5b7ea03383059037e4e76a83710badfded0c48fa96960660db929" Oct 01 17:15:56 crc kubenswrapper[4869]: E1001 17:15:56.583077 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 17:16:10 crc kubenswrapper[4869]: I1001 17:16:10.630275 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-w7d9w/must-gather-qp92d"] Oct 01 17:16:10 crc kubenswrapper[4869]: I1001 17:16:10.632150 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-w7d9w/must-gather-qp92d" Oct 01 17:16:10 crc kubenswrapper[4869]: I1001 17:16:10.634431 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-w7d9w"/"openshift-service-ca.crt" Oct 01 17:16:10 crc kubenswrapper[4869]: I1001 17:16:10.634598 4869 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-w7d9w"/"default-dockercfg-fqw9r" Oct 01 17:16:10 crc kubenswrapper[4869]: I1001 17:16:10.634854 4869 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-w7d9w"/"kube-root-ca.crt" Oct 01 17:16:10 crc kubenswrapper[4869]: I1001 17:16:10.639103 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-w7d9w/must-gather-qp92d"] Oct 01 17:16:10 crc kubenswrapper[4869]: I1001 17:16:10.777485 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/4d51b371-a81b-4c7f-9117-3694ccd6464d-must-gather-output\") pod \"must-gather-qp92d\" (UID: \"4d51b371-a81b-4c7f-9117-3694ccd6464d\") " pod="openshift-must-gather-w7d9w/must-gather-qp92d" Oct 01 17:16:10 crc kubenswrapper[4869]: I1001 17:16:10.777650 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-krdvx\" (UniqueName: \"kubernetes.io/projected/4d51b371-a81b-4c7f-9117-3694ccd6464d-kube-api-access-krdvx\") pod \"must-gather-qp92d\" (UID: \"4d51b371-a81b-4c7f-9117-3694ccd6464d\") " pod="openshift-must-gather-w7d9w/must-gather-qp92d" Oct 01 17:16:10 crc kubenswrapper[4869]: I1001 17:16:10.879727 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/4d51b371-a81b-4c7f-9117-3694ccd6464d-must-gather-output\") pod \"must-gather-qp92d\" (UID: \"4d51b371-a81b-4c7f-9117-3694ccd6464d\") " pod="openshift-must-gather-w7d9w/must-gather-qp92d" Oct 01 17:16:10 crc kubenswrapper[4869]: I1001 17:16:10.879816 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-krdvx\" (UniqueName: \"kubernetes.io/projected/4d51b371-a81b-4c7f-9117-3694ccd6464d-kube-api-access-krdvx\") pod \"must-gather-qp92d\" (UID: \"4d51b371-a81b-4c7f-9117-3694ccd6464d\") " pod="openshift-must-gather-w7d9w/must-gather-qp92d" Oct 01 17:16:10 crc kubenswrapper[4869]: I1001 17:16:10.880249 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/4d51b371-a81b-4c7f-9117-3694ccd6464d-must-gather-output\") pod \"must-gather-qp92d\" (UID: \"4d51b371-a81b-4c7f-9117-3694ccd6464d\") " pod="openshift-must-gather-w7d9w/must-gather-qp92d" Oct 01 17:16:10 crc kubenswrapper[4869]: I1001 17:16:10.899693 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-krdvx\" (UniqueName: \"kubernetes.io/projected/4d51b371-a81b-4c7f-9117-3694ccd6464d-kube-api-access-krdvx\") pod \"must-gather-qp92d\" (UID: \"4d51b371-a81b-4c7f-9117-3694ccd6464d\") " pod="openshift-must-gather-w7d9w/must-gather-qp92d" Oct 01 17:16:10 crc kubenswrapper[4869]: I1001 17:16:10.949619 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-w7d9w/must-gather-qp92d" Oct 01 17:16:11 crc kubenswrapper[4869]: I1001 17:16:11.512101 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-w7d9w/must-gather-qp92d"] Oct 01 17:16:11 crc kubenswrapper[4869]: I1001 17:16:11.588801 4869 scope.go:117] "RemoveContainer" containerID="1c17c6f6d4b5b7ea03383059037e4e76a83710badfded0c48fa96960660db929" Oct 01 17:16:11 crc kubenswrapper[4869]: E1001 17:16:11.589098 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 17:16:12 crc kubenswrapper[4869]: I1001 17:16:12.232579 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-w7d9w/must-gather-qp92d" event={"ID":"4d51b371-a81b-4c7f-9117-3694ccd6464d","Type":"ContainerStarted","Data":"5875104a974b14f9f17ff8b881176281390e5552ac2c0dea8b469460fb4609a3"} Oct 01 17:16:17 crc kubenswrapper[4869]: I1001 17:16:17.277192 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-w7d9w/must-gather-qp92d" event={"ID":"4d51b371-a81b-4c7f-9117-3694ccd6464d","Type":"ContainerStarted","Data":"6625528abc4c76cdb12832e69cdf23f751deca26117085e8a20e629bfb741244"} Oct 01 17:16:17 crc kubenswrapper[4869]: I1001 17:16:17.277855 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-w7d9w/must-gather-qp92d" event={"ID":"4d51b371-a81b-4c7f-9117-3694ccd6464d","Type":"ContainerStarted","Data":"e5991c9d3c8de729fd9ecdfa1a176de7b67f9a2c32ce532088efca33f472b3c3"} Oct 01 17:16:17 crc kubenswrapper[4869]: I1001 17:16:17.295016 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-w7d9w/must-gather-qp92d" podStartSLOduration=2.796107454 podStartE2EDuration="7.29499486s" podCreationTimestamp="2025-10-01 17:16:10 +0000 UTC" firstStartedPulling="2025-10-01 17:16:11.471237629 +0000 UTC m=+7880.618080745" lastFinishedPulling="2025-10-01 17:16:15.970125035 +0000 UTC m=+7885.116968151" observedRunningTime="2025-10-01 17:16:17.290861186 +0000 UTC m=+7886.437704302" watchObservedRunningTime="2025-10-01 17:16:17.29499486 +0000 UTC m=+7886.441837986" Oct 01 17:16:25 crc kubenswrapper[4869]: I1001 17:16:25.823095 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-w7d9w/crc-debug-7brtj"] Oct 01 17:16:25 crc kubenswrapper[4869]: I1001 17:16:25.825632 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-w7d9w/crc-debug-7brtj" Oct 01 17:16:25 crc kubenswrapper[4869]: I1001 17:16:25.909776 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/5ae3cce1-42a4-4050-ad9f-1ef58e2e4d5c-host\") pod \"crc-debug-7brtj\" (UID: \"5ae3cce1-42a4-4050-ad9f-1ef58e2e4d5c\") " pod="openshift-must-gather-w7d9w/crc-debug-7brtj" Oct 01 17:16:25 crc kubenswrapper[4869]: I1001 17:16:25.909843 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h7d8d\" (UniqueName: \"kubernetes.io/projected/5ae3cce1-42a4-4050-ad9f-1ef58e2e4d5c-kube-api-access-h7d8d\") pod \"crc-debug-7brtj\" (UID: \"5ae3cce1-42a4-4050-ad9f-1ef58e2e4d5c\") " pod="openshift-must-gather-w7d9w/crc-debug-7brtj" Oct 01 17:16:26 crc kubenswrapper[4869]: I1001 17:16:26.011753 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h7d8d\" (UniqueName: \"kubernetes.io/projected/5ae3cce1-42a4-4050-ad9f-1ef58e2e4d5c-kube-api-access-h7d8d\") pod \"crc-debug-7brtj\" (UID: \"5ae3cce1-42a4-4050-ad9f-1ef58e2e4d5c\") " pod="openshift-must-gather-w7d9w/crc-debug-7brtj" Oct 01 17:16:26 crc kubenswrapper[4869]: I1001 17:16:26.011952 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/5ae3cce1-42a4-4050-ad9f-1ef58e2e4d5c-host\") pod \"crc-debug-7brtj\" (UID: \"5ae3cce1-42a4-4050-ad9f-1ef58e2e4d5c\") " pod="openshift-must-gather-w7d9w/crc-debug-7brtj" Oct 01 17:16:26 crc kubenswrapper[4869]: I1001 17:16:26.012069 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/5ae3cce1-42a4-4050-ad9f-1ef58e2e4d5c-host\") pod \"crc-debug-7brtj\" (UID: \"5ae3cce1-42a4-4050-ad9f-1ef58e2e4d5c\") " pod="openshift-must-gather-w7d9w/crc-debug-7brtj" Oct 01 17:16:26 crc kubenswrapper[4869]: I1001 17:16:26.031852 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h7d8d\" (UniqueName: \"kubernetes.io/projected/5ae3cce1-42a4-4050-ad9f-1ef58e2e4d5c-kube-api-access-h7d8d\") pod \"crc-debug-7brtj\" (UID: \"5ae3cce1-42a4-4050-ad9f-1ef58e2e4d5c\") " pod="openshift-must-gather-w7d9w/crc-debug-7brtj" Oct 01 17:16:26 crc kubenswrapper[4869]: I1001 17:16:26.145554 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-w7d9w/crc-debug-7brtj" Oct 01 17:16:26 crc kubenswrapper[4869]: W1001 17:16:26.180931 4869 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5ae3cce1_42a4_4050_ad9f_1ef58e2e4d5c.slice/crio-5953e247a9377621ff8cefcd610ed91ce7af54a422ee9d524150e21ac01bf1c7 WatchSource:0}: Error finding container 5953e247a9377621ff8cefcd610ed91ce7af54a422ee9d524150e21ac01bf1c7: Status 404 returned error can't find the container with id 5953e247a9377621ff8cefcd610ed91ce7af54a422ee9d524150e21ac01bf1c7 Oct 01 17:16:26 crc kubenswrapper[4869]: I1001 17:16:26.428909 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-w7d9w/crc-debug-7brtj" event={"ID":"5ae3cce1-42a4-4050-ad9f-1ef58e2e4d5c","Type":"ContainerStarted","Data":"5953e247a9377621ff8cefcd610ed91ce7af54a422ee9d524150e21ac01bf1c7"} Oct 01 17:16:26 crc kubenswrapper[4869]: I1001 17:16:26.593856 4869 scope.go:117] "RemoveContainer" containerID="1c17c6f6d4b5b7ea03383059037e4e76a83710badfded0c48fa96960660db929" Oct 01 17:16:26 crc kubenswrapper[4869]: E1001 17:16:26.594625 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 17:16:39 crc kubenswrapper[4869]: I1001 17:16:39.572377 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-w7d9w/crc-debug-7brtj" event={"ID":"5ae3cce1-42a4-4050-ad9f-1ef58e2e4d5c","Type":"ContainerStarted","Data":"6a4e5c535aef702a50f6ff6ea7f2fbde2651a1b487c1f73a04bd6f313bdc74db"} Oct 01 17:16:39 crc kubenswrapper[4869]: I1001 17:16:39.580587 4869 scope.go:117] "RemoveContainer" containerID="1c17c6f6d4b5b7ea03383059037e4e76a83710badfded0c48fa96960660db929" Oct 01 17:16:39 crc kubenswrapper[4869]: E1001 17:16:39.580887 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 17:16:43 crc kubenswrapper[4869]: E1001 17:16:43.580778 4869 kubelet_pods.go:538] "Hostname for pod was too long, truncated it" podName="test-operator-logs-pod-horizontest-horizontest-tests-horizontest" hostnameMaxLen=63 truncatedHostname="test-operator-logs-pod-horizontest-horizontest-tests-horizontes" Oct 01 17:16:54 crc kubenswrapper[4869]: I1001 17:16:54.581471 4869 scope.go:117] "RemoveContainer" containerID="1c17c6f6d4b5b7ea03383059037e4e76a83710badfded0c48fa96960660db929" Oct 01 17:16:54 crc kubenswrapper[4869]: E1001 17:16:54.582416 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" 
podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 17:17:09 crc kubenswrapper[4869]: I1001 17:17:09.581650 4869 scope.go:117] "RemoveContainer" containerID="1c17c6f6d4b5b7ea03383059037e4e76a83710badfded0c48fa96960660db929" Oct 01 17:17:09 crc kubenswrapper[4869]: E1001 17:17:09.582560 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 17:17:24 crc kubenswrapper[4869]: I1001 17:17:24.580699 4869 scope.go:117] "RemoveContainer" containerID="1c17c6f6d4b5b7ea03383059037e4e76a83710badfded0c48fa96960660db929" Oct 01 17:17:24 crc kubenswrapper[4869]: E1001 17:17:24.581301 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 17:17:28 crc kubenswrapper[4869]: I1001 17:17:28.445533 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ansibletest-ansibletest_5725efb6-5a19-4979-966f-2e6ee4e16109/ansibletest-ansibletest/0.log" Oct 01 17:17:28 crc kubenswrapper[4869]: I1001 17:17:28.708733 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-57c668579d-grs6h_bbb60a0d-b89d-4ab8-aa53-fb753317799f/barbican-api/0.log" Oct 01 17:17:29 crc kubenswrapper[4869]: I1001 17:17:29.243910 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-57c668579d-grs6h_bbb60a0d-b89d-4ab8-aa53-fb753317799f/barbican-api-log/0.log" Oct 01 17:17:29 crc kubenswrapper[4869]: I1001 17:17:29.447454 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-6d6689c8d-7r6k4_af712290-aa09-4efa-80ac-6f655752332e/barbican-keystone-listener/0.log" Oct 01 17:17:30 crc kubenswrapper[4869]: I1001 17:17:30.007758 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-6d6689c8d-7r6k4_af712290-aa09-4efa-80ac-6f655752332e/barbican-keystone-listener-log/0.log" Oct 01 17:17:30 crc kubenswrapper[4869]: I1001 17:17:30.168317 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-7b96966b79-mtqf2_b0cf993c-1e4c-425b-8266-e087119e45b2/barbican-worker/0.log" Oct 01 17:17:30 crc kubenswrapper[4869]: I1001 17:17:30.319480 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-7b96966b79-mtqf2_b0cf993c-1e4c-425b-8266-e087119e45b2/barbican-worker-log/0.log" Oct 01 17:17:30 crc kubenswrapper[4869]: I1001 17:17:30.642103 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-4fzsw_498b6c97-b2b2-4a70-9886-86d2fad1852f/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log" Oct 01 17:17:30 crc kubenswrapper[4869]: I1001 17:17:30.943819 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_0458e249-c518-4c3a-83d7-dda2beb25763/ceilometer-central-agent/0.log" Oct 01 17:17:31 
crc kubenswrapper[4869]: I1001 17:17:31.191390 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_0458e249-c518-4c3a-83d7-dda2beb25763/ceilometer-notification-agent/0.log" Oct 01 17:17:31 crc kubenswrapper[4869]: I1001 17:17:31.236492 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_0458e249-c518-4c3a-83d7-dda2beb25763/proxy-httpd/0.log" Oct 01 17:17:31 crc kubenswrapper[4869]: I1001 17:17:31.332359 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_0458e249-c518-4c3a-83d7-dda2beb25763/sg-core/0.log" Oct 01 17:17:31 crc kubenswrapper[4869]: I1001 17:17:31.476893 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceph-client-edpm-deployment-openstack-edpm-ipam-4gtgk_6852e50e-d598-467e-8588-1aba32529660/ceph-client-edpm-deployment-openstack-edpm-ipam/0.log" Oct 01 17:17:31 crc kubenswrapper[4869]: I1001 17:17:31.808861 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-jwnpq_b062c1f1-4b36-476f-8c2e-ca3b1a7e709c/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam/0.log" Oct 01 17:17:32 crc kubenswrapper[4869]: I1001 17:17:32.005779 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_0af42303-cf8f-4774-82a6-cdc0818f976c/cinder-api/0.log" Oct 01 17:17:32 crc kubenswrapper[4869]: I1001 17:17:32.085235 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_0af42303-cf8f-4774-82a6-cdc0818f976c/cinder-api-log/0.log" Oct 01 17:17:32 crc kubenswrapper[4869]: I1001 17:17:32.472803 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-backup-0_cd72163f-65c7-4984-a967-de8f42861de4/probe/0.log" Oct 01 17:17:32 crc kubenswrapper[4869]: I1001 17:17:32.590698 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-backup-0_cd72163f-65c7-4984-a967-de8f42861de4/cinder-backup/0.log" Oct 01 17:17:32 crc kubenswrapper[4869]: I1001 17:17:32.664847 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_8e8826d4-6549-4216-bcae-afc6a135af5f/cinder-scheduler/0.log" Oct 01 17:17:32 crc kubenswrapper[4869]: I1001 17:17:32.807560 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_8e8826d4-6549-4216-bcae-afc6a135af5f/probe/0.log" Oct 01 17:17:32 crc kubenswrapper[4869]: I1001 17:17:32.960162 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-volume-volume1-0_456f4cbe-3d22-4705-abbe-09cadc1c0ce2/cinder-volume/0.log" Oct 01 17:17:33 crc kubenswrapper[4869]: I1001 17:17:33.105756 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-volume-volume1-0_456f4cbe-3d22-4705-abbe-09cadc1c0ce2/probe/0.log" Oct 01 17:17:33 crc kubenswrapper[4869]: I1001 17:17:33.218376 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-h46vg_3836336e-8f74-48bb-a28b-ed64d526085b/configure-network-edpm-deployment-openstack-edpm-ipam/0.log" Oct 01 17:17:33 crc kubenswrapper[4869]: I1001 17:17:33.401094 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-cjbfj_9fb1424f-221b-46db-98b8-71a60daace2d/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 01 17:17:33 crc kubenswrapper[4869]: I1001 17:17:33.721631 4869 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_dnsmasq-dns-5b88556f9c-4f47f_e4066add-a6a5-4393-a327-5c22cc8bd69e/init/0.log" Oct 01 17:17:34 crc kubenswrapper[4869]: I1001 17:17:34.014910 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-5b88556f9c-4f47f_e4066add-a6a5-4393-a327-5c22cc8bd69e/init/0.log" Oct 01 17:17:34 crc kubenswrapper[4869]: I1001 17:17:34.176738 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-5b88556f9c-4f47f_e4066add-a6a5-4393-a327-5c22cc8bd69e/dnsmasq-dns/0.log" Oct 01 17:17:34 crc kubenswrapper[4869]: I1001 17:17:34.270561 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_734729c7-8311-4c3f-ab6c-55592dfcf7c2/glance-httpd/0.log" Oct 01 17:17:34 crc kubenswrapper[4869]: I1001 17:17:34.304243 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_734729c7-8311-4c3f-ab6c-55592dfcf7c2/glance-log/0.log" Oct 01 17:17:34 crc kubenswrapper[4869]: I1001 17:17:34.525785 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_2a8c2f28-3cab-44c4-911a-c32162850921/glance-log/0.log" Oct 01 17:17:34 crc kubenswrapper[4869]: I1001 17:17:34.537081 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_2a8c2f28-3cab-44c4-911a-c32162850921/glance-httpd/0.log" Oct 01 17:17:34 crc kubenswrapper[4869]: I1001 17:17:34.807671 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-5f66f6967d-mnbqz_eb62e045-ca51-4b33-a63d-9c53b247cc91/horizon/0.log" Oct 01 17:17:35 crc kubenswrapper[4869]: I1001 17:17:35.060187 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizontest-tests-horizontest_e28d6c91-53b4-4c43-885f-4eb32039fb5c/horizontest-tests-horizontest/0.log" Oct 01 17:17:35 crc kubenswrapper[4869]: I1001 17:17:35.314847 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-d9zw8_ae422061-7694-44f9-a6ea-d6ce97da502d/install-certs-edpm-deployment-openstack-edpm-ipam/0.log" Oct 01 17:17:35 crc kubenswrapper[4869]: I1001 17:17:35.555595 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-64bv5_9c034182-bd4b-4756-b834-c66984c690bf/install-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 01 17:17:35 crc kubenswrapper[4869]: I1001 17:17:35.587549 4869 scope.go:117] "RemoveContainer" containerID="1c17c6f6d4b5b7ea03383059037e4e76a83710badfded0c48fa96960660db929" Oct 01 17:17:35 crc kubenswrapper[4869]: E1001 17:17:35.587738 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 17:17:36 crc kubenswrapper[4869]: I1001 17:17:36.140108 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-5f66f6967d-mnbqz_eb62e045-ca51-4b33-a63d-9c53b247cc91/horizon-log/0.log" Oct 01 17:17:36 crc kubenswrapper[4869]: I1001 17:17:36.299144 4869 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_keystone-cron-29322241-7648v_d67f52c3-ce6c-486a-8e11-91d25894472f/keystone-cron/0.log" Oct 01 17:17:36 crc kubenswrapper[4869]: I1001 17:17:36.554771 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29322301-hpmjg_701f9b68-20f9-4937-8541-8e2f104908ef/keystone-cron/0.log" Oct 01 17:17:36 crc kubenswrapper[4869]: I1001 17:17:36.816833 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_43f59ffb-9d2a-48b9-a6b4-44ca953e1314/kube-state-metrics/0.log" Oct 01 17:17:37 crc kubenswrapper[4869]: I1001 17:17:37.243939 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-4m5m5_49bff277-b5e6-4b61-b964-2a615ff1cf94/libvirt-edpm-deployment-openstack-edpm-ipam/0.log" Oct 01 17:17:37 crc kubenswrapper[4869]: I1001 17:17:37.547115 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-api-0_6fecded1-d25b-40a1-b7ce-c7819d11f929/manila-api-log/0.log" Oct 01 17:17:37 crc kubenswrapper[4869]: I1001 17:17:37.639155 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-api-0_6fecded1-d25b-40a1-b7ce-c7819d11f929/manila-api/0.log" Oct 01 17:17:38 crc kubenswrapper[4869]: I1001 17:17:38.055983 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-6b94b79f97-gtm9w_543a3ce0-c8bc-45e3-bc25-617c4e65c08f/keystone-api/0.log" Oct 01 17:17:38 crc kubenswrapper[4869]: I1001 17:17:38.158872 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-scheduler-0_ae87b093-3a64-4d75-9b85-45fed188f715/manila-scheduler/0.log" Oct 01 17:17:38 crc kubenswrapper[4869]: I1001 17:17:38.332591 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-scheduler-0_ae87b093-3a64-4d75-9b85-45fed188f715/probe/0.log" Oct 01 17:17:38 crc kubenswrapper[4869]: I1001 17:17:38.445965 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-share-share1-0_d86b56fc-3eca-4fd9-9abb-d6831a9d12db/manila-share/0.log" Oct 01 17:17:38 crc kubenswrapper[4869]: I1001 17:17:38.535713 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-share-share1-0_d86b56fc-3eca-4fd9-9abb-d6831a9d12db/probe/0.log" Oct 01 17:17:40 crc kubenswrapper[4869]: I1001 17:17:40.045809 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-96b4fb6d7-g25hj_e2f39f35-09b7-4953-b846-2cba520d5325/neutron-httpd/0.log" Oct 01 17:17:40 crc kubenswrapper[4869]: I1001 17:17:40.515186 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-bgt6n_acf87e0a-4aea-4c68-b46c-d5397c47e5b3/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log" Oct 01 17:17:40 crc kubenswrapper[4869]: I1001 17:17:40.608347 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-96b4fb6d7-g25hj_e2f39f35-09b7-4953-b846-2cba520d5325/neutron-api/0.log" Oct 01 17:17:44 crc kubenswrapper[4869]: I1001 17:17:44.726197 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_2eb2029a-beab-4fec-8235-eb5cdfd2ff1a/nova-api-log/0.log" Oct 01 17:17:45 crc kubenswrapper[4869]: I1001 17:17:45.425224 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_2eb2029a-beab-4fec-8235-eb5cdfd2ff1a/nova-api-api/0.log" Oct 01 17:17:45 crc kubenswrapper[4869]: I1001 17:17:45.775179 4869 log.go:25] "Finished parsing 
log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_ec5954ef-7c3a-46ad-b42c-f2fa802fedcf/nova-cell0-conductor-conductor/0.log" Oct 01 17:17:45 crc kubenswrapper[4869]: I1001 17:17:45.951334 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_f6ff894f-b32f-46d8-9d9c-79c08c97478c/nova-cell1-conductor-conductor/0.log" Oct 01 17:17:46 crc kubenswrapper[4869]: I1001 17:17:46.283573 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_c57e77ab-553d-4e64-b104-0a3c434d680b/nova-cell1-novncproxy-novncproxy/0.log" Oct 01 17:17:46 crc kubenswrapper[4869]: I1001 17:17:46.510920 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-kkddc_91f3b9d8-e4a2-4c04-978d-e43153d4af93/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam/0.log" Oct 01 17:17:46 crc kubenswrapper[4869]: I1001 17:17:46.580740 4869 scope.go:117] "RemoveContainer" containerID="1c17c6f6d4b5b7ea03383059037e4e76a83710badfded0c48fa96960660db929" Oct 01 17:17:46 crc kubenswrapper[4869]: I1001 17:17:46.845797 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_448e4afc-2c6f-48b3-8a80-cd7c5bfbabc2/nova-metadata-log/0.log" Oct 01 17:17:47 crc kubenswrapper[4869]: I1001 17:17:47.247450 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" event={"ID":"a4b64f7f-0b03-4f47-965b-9fde048b735c","Type":"ContainerStarted","Data":"244ffa7b3483f330573b185ecbcd30b8ee2fd9a9414df3e81c3e654d5578f87a"} Oct 01 17:17:47 crc kubenswrapper[4869]: I1001 17:17:47.271984 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-w7d9w/crc-debug-7brtj" podStartSLOduration=69.800227255 podStartE2EDuration="1m22.271957948s" podCreationTimestamp="2025-10-01 17:16:25 +0000 UTC" firstStartedPulling="2025-10-01 17:16:26.183043 +0000 UTC m=+7895.329886116" lastFinishedPulling="2025-10-01 17:16:38.654773693 +0000 UTC m=+7907.801616809" observedRunningTime="2025-10-01 17:16:39.590333787 +0000 UTC m=+7908.737176923" watchObservedRunningTime="2025-10-01 17:17:47.271957948 +0000 UTC m=+7976.418801084" Oct 01 17:17:48 crc kubenswrapper[4869]: I1001 17:17:48.606952 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_f1cbcc35-99b2-4580-80c2-727dc1cb96d7/nova-scheduler-scheduler/0.log" Oct 01 17:17:49 crc kubenswrapper[4869]: I1001 17:17:49.140775 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_6c2b4697-41ce-422e-8602-a1c0190745df/mysql-bootstrap/0.log" Oct 01 17:17:49 crc kubenswrapper[4869]: I1001 17:17:49.437428 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_6c2b4697-41ce-422e-8602-a1c0190745df/mysql-bootstrap/0.log" Oct 01 17:17:49 crc kubenswrapper[4869]: E1001 17:17:49.586019 4869 kubelet_pods.go:538] "Hostname for pod was too long, truncated it" podName="test-operator-logs-pod-horizontest-horizontest-tests-horizontest" hostnameMaxLen=63 truncatedHostname="test-operator-logs-pod-horizontest-horizontest-tests-horizontes" Oct 01 17:17:49 crc kubenswrapper[4869]: I1001 17:17:49.722215 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_6c2b4697-41ce-422e-8602-a1c0190745df/galera/0.log" Oct 01 17:17:50 crc kubenswrapper[4869]: I1001 17:17:50.270123 4869 log.go:25] "Finished 
parsing log file" path="/var/log/pods/openstack_openstack-galera-0_7980b3ae-7c3f-42ad-a9e4-9bfb05cb934a/mysql-bootstrap/0.log" Oct 01 17:17:50 crc kubenswrapper[4869]: I1001 17:17:50.503704 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_7980b3ae-7c3f-42ad-a9e4-9bfb05cb934a/mysql-bootstrap/0.log" Oct 01 17:17:50 crc kubenswrapper[4869]: I1001 17:17:50.781642 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_7980b3ae-7c3f-42ad-a9e4-9bfb05cb934a/galera/0.log" Oct 01 17:17:50 crc kubenswrapper[4869]: I1001 17:17:50.994797 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_448e4afc-2c6f-48b3-8a80-cd7c5bfbabc2/nova-metadata-metadata/0.log" Oct 01 17:17:51 crc kubenswrapper[4869]: I1001 17:17:51.243866 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_a25ceadd-9d07-4575-83d0-44bd065cca59/openstackclient/0.log" Oct 01 17:17:51 crc kubenswrapper[4869]: I1001 17:17:51.503633 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-8xxqn_fce7eb65-0111-43b3-9265-700c584695fa/ovn-controller/0.log" Oct 01 17:17:51 crc kubenswrapper[4869]: I1001 17:17:51.708857 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-rn9pl_2d3dbd60-2b6c-431d-a4ce-3d8d606ee5e9/openstack-network-exporter/0.log" Oct 01 17:17:51 crc kubenswrapper[4869]: I1001 17:17:51.884456 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-jhxfd_01ddd573-45b0-4379-8b00-fa92a1da0ec1/ovsdb-server-init/0.log" Oct 01 17:17:52 crc kubenswrapper[4869]: I1001 17:17:52.115327 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-jhxfd_01ddd573-45b0-4379-8b00-fa92a1da0ec1/ovsdb-server-init/0.log" Oct 01 17:17:52 crc kubenswrapper[4869]: I1001 17:17:52.248170 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-jhxfd_01ddd573-45b0-4379-8b00-fa92a1da0ec1/ovs-vswitchd/0.log" Oct 01 17:17:52 crc kubenswrapper[4869]: I1001 17:17:52.282482 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-jhxfd_01ddd573-45b0-4379-8b00-fa92a1da0ec1/ovsdb-server/0.log" Oct 01 17:17:52 crc kubenswrapper[4869]: I1001 17:17:52.539537 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-tx4s2_f72fd252-cf04-4a98-831e-0424d9f38724/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Oct 01 17:17:52 crc kubenswrapper[4869]: I1001 17:17:52.788674 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_9941e8c7-ec7d-4385-bae8-fd5fc9250689/openstack-network-exporter/0.log" Oct 01 17:17:52 crc kubenswrapper[4869]: I1001 17:17:52.966235 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_9941e8c7-ec7d-4385-bae8-fd5fc9250689/ovn-northd/0.log" Oct 01 17:17:53 crc kubenswrapper[4869]: I1001 17:17:53.129855 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_5733eb7d-fef5-44fd-8d97-68660485d909/openstack-network-exporter/0.log" Oct 01 17:17:53 crc kubenswrapper[4869]: I1001 17:17:53.426809 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_5733eb7d-fef5-44fd-8d97-68660485d909/ovsdbserver-nb/0.log" Oct 01 17:17:53 crc kubenswrapper[4869]: I1001 17:17:53.524790 4869 log.go:25] "Finished parsing log 
file" path="/var/log/pods/openstack_ovsdbserver-sb-0_b4bcb7f8-7b79-424f-9b67-e341b25a5ac1/openstack-network-exporter/0.log" Oct 01 17:17:53 crc kubenswrapper[4869]: I1001 17:17:53.636396 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_b4bcb7f8-7b79-424f-9b67-e341b25a5ac1/ovsdbserver-sb/0.log" Oct 01 17:17:54 crc kubenswrapper[4869]: I1001 17:17:54.593389 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-8454446974-2h6ft_77f02e38-9109-4d44-b448-b29e38a252d1/placement-api/0.log" Oct 01 17:17:54 crc kubenswrapper[4869]: I1001 17:17:54.986618 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-8454446974-2h6ft_77f02e38-9109-4d44-b448-b29e38a252d1/placement-log/0.log" Oct 01 17:17:54 crc kubenswrapper[4869]: I1001 17:17:54.994160 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_70946011-083d-41f8-acf9-ab0c4711b48b/setup-container/0.log" Oct 01 17:17:55 crc kubenswrapper[4869]: I1001 17:17:55.187914 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_70946011-083d-41f8-acf9-ab0c4711b48b/setup-container/0.log" Oct 01 17:17:55 crc kubenswrapper[4869]: I1001 17:17:55.205562 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_70946011-083d-41f8-acf9-ab0c4711b48b/rabbitmq/0.log" Oct 01 17:17:55 crc kubenswrapper[4869]: I1001 17:17:55.383588 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_e2d4e177-3ee6-497a-a0c6-db9305809a81/setup-container/0.log" Oct 01 17:17:55 crc kubenswrapper[4869]: I1001 17:17:55.606608 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_05395c1c-e984-4b4c-abb8-6309d6a961da/memcached/0.log" Oct 01 17:17:55 crc kubenswrapper[4869]: I1001 17:17:55.650354 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_e2d4e177-3ee6-497a-a0c6-db9305809a81/rabbitmq/0.log" Oct 01 17:17:55 crc kubenswrapper[4869]: I1001 17:17:55.656126 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_e2d4e177-3ee6-497a-a0c6-db9305809a81/setup-container/0.log" Oct 01 17:17:55 crc kubenswrapper[4869]: I1001 17:17:55.814404 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-6l9z8_e438986b-3250-42b7-a8d6-910aae80e576/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 01 17:17:56 crc kubenswrapper[4869]: I1001 17:17:56.001177 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-qz9pw_b943448c-de46-41bc-a516-1364799a4eba/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log" Oct 01 17:17:56 crc kubenswrapper[4869]: I1001 17:17:56.155683 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-gv92z_b61ff6db-ea0f-458d-a0cf-8f65e610a90e/run-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 01 17:17:56 crc kubenswrapper[4869]: I1001 17:17:56.199195 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-jtzdt_66ab3eff-a1be-4cee-b460-39ac1d384491/ssh-known-hosts-edpm-deployment/0.log" Oct 01 17:17:56 crc kubenswrapper[4869]: I1001 17:17:56.396479 4869 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_tempest-tests-tempest-s00-full_78543bf0-aa4b-45dc-a7c6-37a22a5be6ea/tempest-tests-tempest-tests-runner/0.log" Oct 01 17:17:56 crc kubenswrapper[4869]: I1001 17:17:56.574287 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest-s01-single-test_26dfe6ee-15f8-424e-b16a-58a57d5bc4f8/tempest-tests-tempest-tests-runner/0.log" Oct 01 17:17:56 crc kubenswrapper[4869]: I1001 17:17:56.610383 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-ansibletest-ansibletest-ansibletest_e17c4e7e-6bf5-4519-8b1a-c5f39ebc5ccb/test-operator-logs-container/0.log" Oct 01 17:17:56 crc kubenswrapper[4869]: I1001 17:17:56.728969 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-horizontest-horizontest-tests-horizontest_5e00cfe0-d068-4ef3-9f8c-b50b2b1045f5/test-operator-logs-container/0.log" Oct 01 17:17:56 crc kubenswrapper[4869]: I1001 17:17:56.906432 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_105ad49f-b7e5-40d7-a021-7eef559250ea/test-operator-logs-container/0.log" Oct 01 17:17:56 crc kubenswrapper[4869]: I1001 17:17:56.962957 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-tobiko-tobiko-tests-tobiko_fab05180-0ae2-40c0-afec-c925124a7d35/test-operator-logs-container/0.log" Oct 01 17:17:57 crc kubenswrapper[4869]: I1001 17:17:57.256991 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tobiko-tests-tobiko-s00-podified-functional_a1a92aa0-ac91-4391-949a-fd2bcfa3e714/tobiko-tests-tobiko/0.log" Oct 01 17:17:57 crc kubenswrapper[4869]: I1001 17:17:57.358198 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tobiko-tests-tobiko-s01-sanity_cc01041e-7f42-4831-bb4d-c663af563735/tobiko-tests-tobiko/0.log" Oct 01 17:17:57 crc kubenswrapper[4869]: I1001 17:17:57.484962 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-gthv5_76589bac-382f-430e-83f3-ff32e9634017/validate-network-edpm-deployment-openstack-edpm-ipam/0.log" Oct 01 17:19:13 crc kubenswrapper[4869]: I1001 17:19:13.197652 4869 generic.go:334] "Generic (PLEG): container finished" podID="5ae3cce1-42a4-4050-ad9f-1ef58e2e4d5c" containerID="6a4e5c535aef702a50f6ff6ea7f2fbde2651a1b487c1f73a04bd6f313bdc74db" exitCode=0 Oct 01 17:19:13 crc kubenswrapper[4869]: I1001 17:19:13.197706 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-w7d9w/crc-debug-7brtj" event={"ID":"5ae3cce1-42a4-4050-ad9f-1ef58e2e4d5c","Type":"ContainerDied","Data":"6a4e5c535aef702a50f6ff6ea7f2fbde2651a1b487c1f73a04bd6f313bdc74db"} Oct 01 17:19:14 crc kubenswrapper[4869]: I1001 17:19:14.338764 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-w7d9w/crc-debug-7brtj" Oct 01 17:19:14 crc kubenswrapper[4869]: I1001 17:19:14.389903 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-w7d9w/crc-debug-7brtj"] Oct 01 17:19:14 crc kubenswrapper[4869]: I1001 17:19:14.408600 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-w7d9w/crc-debug-7brtj"] Oct 01 17:19:14 crc kubenswrapper[4869]: I1001 17:19:14.424502 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/5ae3cce1-42a4-4050-ad9f-1ef58e2e4d5c-host\") pod \"5ae3cce1-42a4-4050-ad9f-1ef58e2e4d5c\" (UID: \"5ae3cce1-42a4-4050-ad9f-1ef58e2e4d5c\") " Oct 01 17:19:14 crc kubenswrapper[4869]: I1001 17:19:14.424626 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h7d8d\" (UniqueName: \"kubernetes.io/projected/5ae3cce1-42a4-4050-ad9f-1ef58e2e4d5c-kube-api-access-h7d8d\") pod \"5ae3cce1-42a4-4050-ad9f-1ef58e2e4d5c\" (UID: \"5ae3cce1-42a4-4050-ad9f-1ef58e2e4d5c\") " Oct 01 17:19:14 crc kubenswrapper[4869]: I1001 17:19:14.424619 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5ae3cce1-42a4-4050-ad9f-1ef58e2e4d5c-host" (OuterVolumeSpecName: "host") pod "5ae3cce1-42a4-4050-ad9f-1ef58e2e4d5c" (UID: "5ae3cce1-42a4-4050-ad9f-1ef58e2e4d5c"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 17:19:14 crc kubenswrapper[4869]: I1001 17:19:14.425274 4869 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/5ae3cce1-42a4-4050-ad9f-1ef58e2e4d5c-host\") on node \"crc\" DevicePath \"\"" Oct 01 17:19:14 crc kubenswrapper[4869]: I1001 17:19:14.434605 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5ae3cce1-42a4-4050-ad9f-1ef58e2e4d5c-kube-api-access-h7d8d" (OuterVolumeSpecName: "kube-api-access-h7d8d") pod "5ae3cce1-42a4-4050-ad9f-1ef58e2e4d5c" (UID: "5ae3cce1-42a4-4050-ad9f-1ef58e2e4d5c"). InnerVolumeSpecName "kube-api-access-h7d8d". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 17:19:14 crc kubenswrapper[4869]: I1001 17:19:14.527099 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h7d8d\" (UniqueName: \"kubernetes.io/projected/5ae3cce1-42a4-4050-ad9f-1ef58e2e4d5c-kube-api-access-h7d8d\") on node \"crc\" DevicePath \"\"" Oct 01 17:19:15 crc kubenswrapper[4869]: I1001 17:19:15.222372 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5953e247a9377621ff8cefcd610ed91ce7af54a422ee9d524150e21ac01bf1c7" Oct 01 17:19:15 crc kubenswrapper[4869]: I1001 17:19:15.222430 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-w7d9w/crc-debug-7brtj" Oct 01 17:19:15 crc kubenswrapper[4869]: I1001 17:19:15.524004 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-w7d9w/crc-debug-sd4s2"] Oct 01 17:19:15 crc kubenswrapper[4869]: E1001 17:19:15.524385 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ae3cce1-42a4-4050-ad9f-1ef58e2e4d5c" containerName="container-00" Oct 01 17:19:15 crc kubenswrapper[4869]: I1001 17:19:15.524395 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ae3cce1-42a4-4050-ad9f-1ef58e2e4d5c" containerName="container-00" Oct 01 17:19:15 crc kubenswrapper[4869]: I1001 17:19:15.524585 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="5ae3cce1-42a4-4050-ad9f-1ef58e2e4d5c" containerName="container-00" Oct 01 17:19:15 crc kubenswrapper[4869]: I1001 17:19:15.525173 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-w7d9w/crc-debug-sd4s2" Oct 01 17:19:15 crc kubenswrapper[4869]: I1001 17:19:15.595371 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5ae3cce1-42a4-4050-ad9f-1ef58e2e4d5c" path="/var/lib/kubelet/pods/5ae3cce1-42a4-4050-ad9f-1ef58e2e4d5c/volumes" Oct 01 17:19:15 crc kubenswrapper[4869]: I1001 17:19:15.649279 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zkclt\" (UniqueName: \"kubernetes.io/projected/0e734cfc-b1eb-4c2d-a4d2-f2da1cf3524a-kube-api-access-zkclt\") pod \"crc-debug-sd4s2\" (UID: \"0e734cfc-b1eb-4c2d-a4d2-f2da1cf3524a\") " pod="openshift-must-gather-w7d9w/crc-debug-sd4s2" Oct 01 17:19:15 crc kubenswrapper[4869]: I1001 17:19:15.649724 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0e734cfc-b1eb-4c2d-a4d2-f2da1cf3524a-host\") pod \"crc-debug-sd4s2\" (UID: \"0e734cfc-b1eb-4c2d-a4d2-f2da1cf3524a\") " pod="openshift-must-gather-w7d9w/crc-debug-sd4s2" Oct 01 17:19:15 crc kubenswrapper[4869]: I1001 17:19:15.752613 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zkclt\" (UniqueName: \"kubernetes.io/projected/0e734cfc-b1eb-4c2d-a4d2-f2da1cf3524a-kube-api-access-zkclt\") pod \"crc-debug-sd4s2\" (UID: \"0e734cfc-b1eb-4c2d-a4d2-f2da1cf3524a\") " pod="openshift-must-gather-w7d9w/crc-debug-sd4s2" Oct 01 17:19:15 crc kubenswrapper[4869]: I1001 17:19:15.752876 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0e734cfc-b1eb-4c2d-a4d2-f2da1cf3524a-host\") pod \"crc-debug-sd4s2\" (UID: \"0e734cfc-b1eb-4c2d-a4d2-f2da1cf3524a\") " pod="openshift-must-gather-w7d9w/crc-debug-sd4s2" Oct 01 17:19:15 crc kubenswrapper[4869]: I1001 17:19:15.752982 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0e734cfc-b1eb-4c2d-a4d2-f2da1cf3524a-host\") pod \"crc-debug-sd4s2\" (UID: \"0e734cfc-b1eb-4c2d-a4d2-f2da1cf3524a\") " pod="openshift-must-gather-w7d9w/crc-debug-sd4s2" Oct 01 17:19:15 crc kubenswrapper[4869]: I1001 17:19:15.786043 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zkclt\" (UniqueName: \"kubernetes.io/projected/0e734cfc-b1eb-4c2d-a4d2-f2da1cf3524a-kube-api-access-zkclt\") pod \"crc-debug-sd4s2\" (UID: \"0e734cfc-b1eb-4c2d-a4d2-f2da1cf3524a\") " 
pod="openshift-must-gather-w7d9w/crc-debug-sd4s2" Oct 01 17:19:15 crc kubenswrapper[4869]: I1001 17:19:15.841563 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-w7d9w/crc-debug-sd4s2" Oct 01 17:19:16 crc kubenswrapper[4869]: I1001 17:19:16.234756 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-w7d9w/crc-debug-sd4s2" event={"ID":"0e734cfc-b1eb-4c2d-a4d2-f2da1cf3524a","Type":"ContainerStarted","Data":"b1cd448762592f4977a06bad3b54c4973f720e19257b3bbd411c858b42dad5ec"} Oct 01 17:19:16 crc kubenswrapper[4869]: I1001 17:19:16.235571 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-w7d9w/crc-debug-sd4s2" event={"ID":"0e734cfc-b1eb-4c2d-a4d2-f2da1cf3524a","Type":"ContainerStarted","Data":"0499640bd10875e9ae2e106ff10c419a16cab95de5f23736780bad890450dda2"} Oct 01 17:19:16 crc kubenswrapper[4869]: I1001 17:19:16.255150 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-w7d9w/crc-debug-sd4s2" podStartSLOduration=1.2551314310000001 podStartE2EDuration="1.255131431s" podCreationTimestamp="2025-10-01 17:19:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 17:19:16.251855018 +0000 UTC m=+8065.398698134" watchObservedRunningTime="2025-10-01 17:19:16.255131431 +0000 UTC m=+8065.401974547" Oct 01 17:19:17 crc kubenswrapper[4869]: I1001 17:19:17.244406 4869 generic.go:334] "Generic (PLEG): container finished" podID="0e734cfc-b1eb-4c2d-a4d2-f2da1cf3524a" containerID="b1cd448762592f4977a06bad3b54c4973f720e19257b3bbd411c858b42dad5ec" exitCode=0 Oct 01 17:19:17 crc kubenswrapper[4869]: I1001 17:19:17.244464 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-w7d9w/crc-debug-sd4s2" event={"ID":"0e734cfc-b1eb-4c2d-a4d2-f2da1cf3524a","Type":"ContainerDied","Data":"b1cd448762592f4977a06bad3b54c4973f720e19257b3bbd411c858b42dad5ec"} Oct 01 17:19:18 crc kubenswrapper[4869]: I1001 17:19:18.363666 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-w7d9w/crc-debug-sd4s2" Oct 01 17:19:18 crc kubenswrapper[4869]: I1001 17:19:18.501519 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0e734cfc-b1eb-4c2d-a4d2-f2da1cf3524a-host\") pod \"0e734cfc-b1eb-4c2d-a4d2-f2da1cf3524a\" (UID: \"0e734cfc-b1eb-4c2d-a4d2-f2da1cf3524a\") " Oct 01 17:19:18 crc kubenswrapper[4869]: I1001 17:19:18.501569 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkclt\" (UniqueName: \"kubernetes.io/projected/0e734cfc-b1eb-4c2d-a4d2-f2da1cf3524a-kube-api-access-zkclt\") pod \"0e734cfc-b1eb-4c2d-a4d2-f2da1cf3524a\" (UID: \"0e734cfc-b1eb-4c2d-a4d2-f2da1cf3524a\") " Oct 01 17:19:18 crc kubenswrapper[4869]: I1001 17:19:18.501626 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0e734cfc-b1eb-4c2d-a4d2-f2da1cf3524a-host" (OuterVolumeSpecName: "host") pod "0e734cfc-b1eb-4c2d-a4d2-f2da1cf3524a" (UID: "0e734cfc-b1eb-4c2d-a4d2-f2da1cf3524a"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 17:19:18 crc kubenswrapper[4869]: I1001 17:19:18.501992 4869 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0e734cfc-b1eb-4c2d-a4d2-f2da1cf3524a-host\") on node \"crc\" DevicePath \"\"" Oct 01 17:19:18 crc kubenswrapper[4869]: I1001 17:19:18.509071 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0e734cfc-b1eb-4c2d-a4d2-f2da1cf3524a-kube-api-access-zkclt" (OuterVolumeSpecName: "kube-api-access-zkclt") pod "0e734cfc-b1eb-4c2d-a4d2-f2da1cf3524a" (UID: "0e734cfc-b1eb-4c2d-a4d2-f2da1cf3524a"). InnerVolumeSpecName "kube-api-access-zkclt". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 17:19:18 crc kubenswrapper[4869]: E1001 17:19:18.581298 4869 kubelet_pods.go:538] "Hostname for pod was too long, truncated it" podName="test-operator-logs-pod-horizontest-horizontest-tests-horizontest" hostnameMaxLen=63 truncatedHostname="test-operator-logs-pod-horizontest-horizontest-tests-horizontes" Oct 01 17:19:18 crc kubenswrapper[4869]: I1001 17:19:18.603460 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkclt\" (UniqueName: \"kubernetes.io/projected/0e734cfc-b1eb-4c2d-a4d2-f2da1cf3524a-kube-api-access-zkclt\") on node \"crc\" DevicePath \"\"" Oct 01 17:19:19 crc kubenswrapper[4869]: I1001 17:19:19.262016 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-w7d9w/crc-debug-sd4s2" event={"ID":"0e734cfc-b1eb-4c2d-a4d2-f2da1cf3524a","Type":"ContainerDied","Data":"0499640bd10875e9ae2e106ff10c419a16cab95de5f23736780bad890450dda2"} Oct 01 17:19:19 crc kubenswrapper[4869]: I1001 17:19:19.262345 4869 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0499640bd10875e9ae2e106ff10c419a16cab95de5f23736780bad890450dda2" Oct 01 17:19:19 crc kubenswrapper[4869]: I1001 17:19:19.262058 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-w7d9w/crc-debug-sd4s2" Oct 01 17:19:27 crc kubenswrapper[4869]: I1001 17:19:27.070243 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-w7d9w/crc-debug-sd4s2"] Oct 01 17:19:27 crc kubenswrapper[4869]: I1001 17:19:27.077801 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-w7d9w/crc-debug-sd4s2"] Oct 01 17:19:27 crc kubenswrapper[4869]: I1001 17:19:27.596409 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0e734cfc-b1eb-4c2d-a4d2-f2da1cf3524a" path="/var/lib/kubelet/pods/0e734cfc-b1eb-4c2d-a4d2-f2da1cf3524a/volumes" Oct 01 17:19:28 crc kubenswrapper[4869]: I1001 17:19:28.231884 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-w7d9w/crc-debug-rsvvj"] Oct 01 17:19:28 crc kubenswrapper[4869]: E1001 17:19:28.232319 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e734cfc-b1eb-4c2d-a4d2-f2da1cf3524a" containerName="container-00" Oct 01 17:19:28 crc kubenswrapper[4869]: I1001 17:19:28.232331 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="0e734cfc-b1eb-4c2d-a4d2-f2da1cf3524a" containerName="container-00" Oct 01 17:19:28 crc kubenswrapper[4869]: I1001 17:19:28.232545 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="0e734cfc-b1eb-4c2d-a4d2-f2da1cf3524a" containerName="container-00" Oct 01 17:19:28 crc kubenswrapper[4869]: I1001 17:19:28.233149 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-w7d9w/crc-debug-rsvvj" Oct 01 17:19:28 crc kubenswrapper[4869]: I1001 17:19:28.275361 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/71bab402-5ec0-4ea8-904f-0574481967ed-host\") pod \"crc-debug-rsvvj\" (UID: \"71bab402-5ec0-4ea8-904f-0574481967ed\") " pod="openshift-must-gather-w7d9w/crc-debug-rsvvj" Oct 01 17:19:28 crc kubenswrapper[4869]: I1001 17:19:28.275696 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7z98q\" (UniqueName: \"kubernetes.io/projected/71bab402-5ec0-4ea8-904f-0574481967ed-kube-api-access-7z98q\") pod \"crc-debug-rsvvj\" (UID: \"71bab402-5ec0-4ea8-904f-0574481967ed\") " pod="openshift-must-gather-w7d9w/crc-debug-rsvvj" Oct 01 17:19:28 crc kubenswrapper[4869]: I1001 17:19:28.378008 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/71bab402-5ec0-4ea8-904f-0574481967ed-host\") pod \"crc-debug-rsvvj\" (UID: \"71bab402-5ec0-4ea8-904f-0574481967ed\") " pod="openshift-must-gather-w7d9w/crc-debug-rsvvj" Oct 01 17:19:28 crc kubenswrapper[4869]: I1001 17:19:28.378328 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7z98q\" (UniqueName: \"kubernetes.io/projected/71bab402-5ec0-4ea8-904f-0574481967ed-kube-api-access-7z98q\") pod \"crc-debug-rsvvj\" (UID: \"71bab402-5ec0-4ea8-904f-0574481967ed\") " pod="openshift-must-gather-w7d9w/crc-debug-rsvvj" Oct 01 17:19:28 crc kubenswrapper[4869]: I1001 17:19:28.378151 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/71bab402-5ec0-4ea8-904f-0574481967ed-host\") pod \"crc-debug-rsvvj\" (UID: \"71bab402-5ec0-4ea8-904f-0574481967ed\") " pod="openshift-must-gather-w7d9w/crc-debug-rsvvj" Oct 01 17:19:28 crc kubenswrapper[4869]: I1001 17:19:28.409508 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7z98q\" (UniqueName: \"kubernetes.io/projected/71bab402-5ec0-4ea8-904f-0574481967ed-kube-api-access-7z98q\") pod \"crc-debug-rsvvj\" (UID: \"71bab402-5ec0-4ea8-904f-0574481967ed\") " pod="openshift-must-gather-w7d9w/crc-debug-rsvvj" Oct 01 17:19:28 crc kubenswrapper[4869]: I1001 17:19:28.556580 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-w7d9w/crc-debug-rsvvj" Oct 01 17:19:29 crc kubenswrapper[4869]: I1001 17:19:29.345755 4869 generic.go:334] "Generic (PLEG): container finished" podID="71bab402-5ec0-4ea8-904f-0574481967ed" containerID="0f293c8ff4fcde6ebf1368d913052258cb1219127b923a5578e004551314c8a4" exitCode=0 Oct 01 17:19:29 crc kubenswrapper[4869]: I1001 17:19:29.345961 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-w7d9w/crc-debug-rsvvj" event={"ID":"71bab402-5ec0-4ea8-904f-0574481967ed","Type":"ContainerDied","Data":"0f293c8ff4fcde6ebf1368d913052258cb1219127b923a5578e004551314c8a4"} Oct 01 17:19:29 crc kubenswrapper[4869]: I1001 17:19:29.346094 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-w7d9w/crc-debug-rsvvj" event={"ID":"71bab402-5ec0-4ea8-904f-0574481967ed","Type":"ContainerStarted","Data":"6fb23c5bf1ae9faf1c2be6898b73887b9bff6719d5a827563f0fcd6458c6e7f3"} Oct 01 17:19:29 crc kubenswrapper[4869]: I1001 17:19:29.386508 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-w7d9w/crc-debug-rsvvj"] Oct 01 17:19:29 crc kubenswrapper[4869]: I1001 17:19:29.394299 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-w7d9w/crc-debug-rsvvj"] Oct 01 17:19:30 crc kubenswrapper[4869]: I1001 17:19:30.486743 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-w7d9w/crc-debug-rsvvj" Oct 01 17:19:30 crc kubenswrapper[4869]: I1001 17:19:30.520949 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/71bab402-5ec0-4ea8-904f-0574481967ed-host\") pod \"71bab402-5ec0-4ea8-904f-0574481967ed\" (UID: \"71bab402-5ec0-4ea8-904f-0574481967ed\") " Oct 01 17:19:30 crc kubenswrapper[4869]: I1001 17:19:30.521090 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/71bab402-5ec0-4ea8-904f-0574481967ed-host" (OuterVolumeSpecName: "host") pod "71bab402-5ec0-4ea8-904f-0574481967ed" (UID: "71bab402-5ec0-4ea8-904f-0574481967ed"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 17:19:30 crc kubenswrapper[4869]: I1001 17:19:30.521191 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7z98q\" (UniqueName: \"kubernetes.io/projected/71bab402-5ec0-4ea8-904f-0574481967ed-kube-api-access-7z98q\") pod \"71bab402-5ec0-4ea8-904f-0574481967ed\" (UID: \"71bab402-5ec0-4ea8-904f-0574481967ed\") " Oct 01 17:19:30 crc kubenswrapper[4869]: I1001 17:19:30.521834 4869 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/71bab402-5ec0-4ea8-904f-0574481967ed-host\") on node \"crc\" DevicePath \"\"" Oct 01 17:19:30 crc kubenswrapper[4869]: I1001 17:19:30.530078 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/71bab402-5ec0-4ea8-904f-0574481967ed-kube-api-access-7z98q" (OuterVolumeSpecName: "kube-api-access-7z98q") pod "71bab402-5ec0-4ea8-904f-0574481967ed" (UID: "71bab402-5ec0-4ea8-904f-0574481967ed"). InnerVolumeSpecName "kube-api-access-7z98q". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 17:19:30 crc kubenswrapper[4869]: I1001 17:19:30.623824 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7z98q\" (UniqueName: \"kubernetes.io/projected/71bab402-5ec0-4ea8-904f-0574481967ed-kube-api-access-7z98q\") on node \"crc\" DevicePath \"\"" Oct 01 17:19:30 crc kubenswrapper[4869]: I1001 17:19:30.978079 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_0e56acba034a4fe5e21c87878f7cabfd0ead2befdef111b141ca11c71a867s4_e6a47fc5-70fe-4006-a6a2-bca9667eec47/util/0.log" Oct 01 17:19:31 crc kubenswrapper[4869]: I1001 17:19:31.175300 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_0e56acba034a4fe5e21c87878f7cabfd0ead2befdef111b141ca11c71a867s4_e6a47fc5-70fe-4006-a6a2-bca9667eec47/pull/0.log" Oct 01 17:19:31 crc kubenswrapper[4869]: I1001 17:19:31.186666 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_0e56acba034a4fe5e21c87878f7cabfd0ead2befdef111b141ca11c71a867s4_e6a47fc5-70fe-4006-a6a2-bca9667eec47/pull/0.log" Oct 01 17:19:31 crc kubenswrapper[4869]: I1001 17:19:31.191999 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_0e56acba034a4fe5e21c87878f7cabfd0ead2befdef111b141ca11c71a867s4_e6a47fc5-70fe-4006-a6a2-bca9667eec47/util/0.log" Oct 01 17:19:31 crc kubenswrapper[4869]: I1001 17:19:31.346388 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_0e56acba034a4fe5e21c87878f7cabfd0ead2befdef111b141ca11c71a867s4_e6a47fc5-70fe-4006-a6a2-bca9667eec47/util/0.log" Oct 01 17:19:31 crc kubenswrapper[4869]: I1001 17:19:31.365936 4869 scope.go:117] "RemoveContainer" containerID="0f293c8ff4fcde6ebf1368d913052258cb1219127b923a5578e004551314c8a4" Oct 01 17:19:31 crc kubenswrapper[4869]: I1001 17:19:31.366109 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-w7d9w/crc-debug-rsvvj" Oct 01 17:19:31 crc kubenswrapper[4869]: I1001 17:19:31.435495 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_0e56acba034a4fe5e21c87878f7cabfd0ead2befdef111b141ca11c71a867s4_e6a47fc5-70fe-4006-a6a2-bca9667eec47/extract/0.log" Oct 01 17:19:31 crc kubenswrapper[4869]: I1001 17:19:31.559601 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_0e56acba034a4fe5e21c87878f7cabfd0ead2befdef111b141ca11c71a867s4_e6a47fc5-70fe-4006-a6a2-bca9667eec47/pull/0.log" Oct 01 17:19:31 crc kubenswrapper[4869]: I1001 17:19:31.574821 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-f7f98cb69-79zfb_62fc6158-1408-44bd-891b-ef7ead1f5867/kube-rbac-proxy/0.log" Oct 01 17:19:31 crc kubenswrapper[4869]: I1001 17:19:31.591915 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="71bab402-5ec0-4ea8-904f-0574481967ed" path="/var/lib/kubelet/pods/71bab402-5ec0-4ea8-904f-0574481967ed/volumes" Oct 01 17:19:31 crc kubenswrapper[4869]: I1001 17:19:31.692993 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-f7f98cb69-79zfb_62fc6158-1408-44bd-891b-ef7ead1f5867/manager/0.log" Oct 01 17:19:31 crc kubenswrapper[4869]: I1001 17:19:31.758985 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859cd486d-s8dwj_3e866523-b046-49e3-88f5-1c657a204a14/kube-rbac-proxy/0.log" Oct 01 17:19:31 crc kubenswrapper[4869]: I1001 17:19:31.843617 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859cd486d-s8dwj_3e866523-b046-49e3-88f5-1c657a204a14/manager/0.log" Oct 01 17:19:31 crc kubenswrapper[4869]: I1001 17:19:31.903804 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-77fb7bcf5b-7pl4z_c7cb96bc-1269-4c0c-b3f3-1575ee10543e/kube-rbac-proxy/0.log" Oct 01 17:19:31 crc kubenswrapper[4869]: I1001 17:19:31.947356 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-77fb7bcf5b-7pl4z_c7cb96bc-1269-4c0c-b3f3-1575ee10543e/manager/0.log" Oct 01 17:19:32 crc kubenswrapper[4869]: I1001 17:19:32.045518 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-8bc4775b5-wmx8b_ee8b5119-5b8d-494f-8864-8f0cf2a10631/kube-rbac-proxy/0.log" Oct 01 17:19:32 crc kubenswrapper[4869]: I1001 17:19:32.124330 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-8bc4775b5-wmx8b_ee8b5119-5b8d-494f-8864-8f0cf2a10631/manager/0.log" Oct 01 17:19:32 crc kubenswrapper[4869]: I1001 17:19:32.177117 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5b4fc86755-g27k4_a67caefc-004c-4cd3-92b1-191f9531044a/kube-rbac-proxy/0.log" Oct 01 17:19:32 crc kubenswrapper[4869]: I1001 17:19:32.247410 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5b4fc86755-g27k4_a67caefc-004c-4cd3-92b1-191f9531044a/manager/0.log" Oct 01 17:19:32 crc kubenswrapper[4869]: I1001 17:19:32.348532 4869 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-679b4759bb-mrrm6_1f43a837-ea14-4bdb-9b91-ffbd20f1bad3/kube-rbac-proxy/0.log" Oct 01 17:19:32 crc kubenswrapper[4869]: I1001 17:19:32.370902 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-679b4759bb-mrrm6_1f43a837-ea14-4bdb-9b91-ffbd20f1bad3/manager/0.log" Oct 01 17:19:32 crc kubenswrapper[4869]: I1001 17:19:32.513840 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-5c8fdc4d5c-pd6wq_ccda625c-cf12-415b-9a87-dd77a4c0fa1b/kube-rbac-proxy/0.log" Oct 01 17:19:32 crc kubenswrapper[4869]: I1001 17:19:32.665732 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-5f45cd594f-mh4jm_d7e4e3d7-4b52-46b2-8097-b00b4de3b87a/kube-rbac-proxy/0.log" Oct 01 17:19:32 crc kubenswrapper[4869]: I1001 17:19:32.706819 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-5c8fdc4d5c-pd6wq_ccda625c-cf12-415b-9a87-dd77a4c0fa1b/manager/0.log" Oct 01 17:19:32 crc kubenswrapper[4869]: I1001 17:19:32.718544 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-5f45cd594f-mh4jm_d7e4e3d7-4b52-46b2-8097-b00b4de3b87a/manager/0.log" Oct 01 17:19:32 crc kubenswrapper[4869]: I1001 17:19:32.863589 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-59d7dc95cf-mqdq6_e1f0cffe-e44a-432b-bd66-03be980080b2/kube-rbac-proxy/0.log" Oct 01 17:19:32 crc kubenswrapper[4869]: I1001 17:19:32.938852 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-59d7dc95cf-mqdq6_e1f0cffe-e44a-432b-bd66-03be980080b2/manager/0.log" Oct 01 17:19:33 crc kubenswrapper[4869]: I1001 17:19:33.003090 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-b7cf8cb5f-6clgt_e51d646d-01fe-48e5-af48-29db1e16c849/kube-rbac-proxy/0.log" Oct 01 17:19:33 crc kubenswrapper[4869]: I1001 17:19:33.105319 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-b7cf8cb5f-6clgt_e51d646d-01fe-48e5-af48-29db1e16c849/manager/0.log" Oct 01 17:19:33 crc kubenswrapper[4869]: I1001 17:19:33.125161 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-67bf5bb885-fv6pk_1ee62730-fdac-40bd-b923-d5544be938e1/kube-rbac-proxy/0.log" Oct 01 17:19:33 crc kubenswrapper[4869]: I1001 17:19:33.247844 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-67bf5bb885-fv6pk_1ee62730-fdac-40bd-b923-d5544be938e1/manager/0.log" Oct 01 17:19:33 crc kubenswrapper[4869]: I1001 17:19:33.316059 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-54fbbfcd44-j6zn8_cbe9be45-3694-4fe5-ae10-c03fbd176bbc/kube-rbac-proxy/0.log" Oct 01 17:19:33 crc kubenswrapper[4869]: I1001 17:19:33.362928 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-54fbbfcd44-j6zn8_cbe9be45-3694-4fe5-ae10-c03fbd176bbc/manager/0.log" Oct 01 17:19:33 crc kubenswrapper[4869]: I1001 17:19:33.512585 4869 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-7fd5b6bbc6-5tcmh_0d140b62-4c85-405b-9eff-8dc02ad9e2ed/kube-rbac-proxy/0.log" Oct 01 17:19:33 crc kubenswrapper[4869]: I1001 17:19:33.571638 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-7fd5b6bbc6-5tcmh_0d140b62-4c85-405b-9eff-8dc02ad9e2ed/manager/0.log" Oct 01 17:19:33 crc kubenswrapper[4869]: I1001 17:19:33.625500 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-75f8d67d86-42l4j_64c75872-eff8-45af-bf14-88b5896489ee/kube-rbac-proxy/0.log" Oct 01 17:19:33 crc kubenswrapper[4869]: I1001 17:19:33.752833 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-75f8d67d86-42l4j_64c75872-eff8-45af-bf14-88b5896489ee/manager/0.log" Oct 01 17:19:33 crc kubenswrapper[4869]: I1001 17:19:33.767737 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-659bb84579nbl75_14d20f5e-b22a-4c46-8712-f65e973ee387/kube-rbac-proxy/0.log" Oct 01 17:19:33 crc kubenswrapper[4869]: I1001 17:19:33.831319 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-659bb84579nbl75_14d20f5e-b22a-4c46-8712-f65e973ee387/manager/0.log" Oct 01 17:19:34 crc kubenswrapper[4869]: I1001 17:19:34.040438 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-6c7b6bcb7c-hnlxr_3f500075-2bfe-440a-856c-976d2404158f/kube-rbac-proxy/0.log" Oct 01 17:19:34 crc kubenswrapper[4869]: I1001 17:19:34.186363 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-6b47f79668-mq25c_4fdc8588-1f96-4122-ba09-bda7cc861582/kube-rbac-proxy/0.log" Oct 01 17:19:34 crc kubenswrapper[4869]: I1001 17:19:34.240894 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-6b47f79668-mq25c_4fdc8588-1f96-4122-ba09-bda7cc861582/operator/0.log" Oct 01 17:19:34 crc kubenswrapper[4869]: I1001 17:19:34.424533 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-84c745747f-gv26d_bcc95d48-0c42-425f-97de-90db5f8d02c8/kube-rbac-proxy/0.log" Oct 01 17:19:34 crc kubenswrapper[4869]: I1001 17:19:34.536887 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-t6m4z_ec4e2c58-823d-4a10-a96e-5e4a1a8955f2/registry-server/0.log" Oct 01 17:19:34 crc kubenswrapper[4869]: I1001 17:19:34.557459 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-84c745747f-gv26d_bcc95d48-0c42-425f-97de-90db5f8d02c8/manager/0.log" Oct 01 17:19:34 crc kubenswrapper[4869]: I1001 17:19:34.626157 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-598c4c8547-lq48f_c74d5ad2-5385-45ee-af5a-db7c45af2bef/kube-rbac-proxy/0.log" Oct 01 17:19:34 crc kubenswrapper[4869]: I1001 17:19:34.804213 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-598c4c8547-lq48f_c74d5ad2-5385-45ee-af5a-db7c45af2bef/manager/0.log" Oct 01 17:19:34 crc kubenswrapper[4869]: 
I1001 17:19:34.937744 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-5f97d8c699-6mcvk_34b12def-835e-430c-9e9a-29f191900a00/operator/0.log" Oct 01 17:19:35 crc kubenswrapper[4869]: I1001 17:19:35.001397 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-689b4f76c9-7fhd4_a49df948-6460-4d87-82d4-f65bf570cb7b/manager/0.log" Oct 01 17:19:35 crc kubenswrapper[4869]: I1001 17:19:35.014312 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-689b4f76c9-7fhd4_a49df948-6460-4d87-82d4-f65bf570cb7b/kube-rbac-proxy/0.log" Oct 01 17:19:35 crc kubenswrapper[4869]: I1001 17:19:35.205639 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-cb66d6b59-r69lh_4d9e486b-4e16-422f-b594-f6e6bf76c569/kube-rbac-proxy/0.log" Oct 01 17:19:35 crc kubenswrapper[4869]: I1001 17:19:35.246920 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-cb66d6b59-r69lh_4d9e486b-4e16-422f-b594-f6e6bf76c569/manager/0.log" Oct 01 17:19:35 crc kubenswrapper[4869]: I1001 17:19:35.256432 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-6c7b6bcb7c-hnlxr_3f500075-2bfe-440a-856c-976d2404158f/manager/0.log" Oct 01 17:19:35 crc kubenswrapper[4869]: I1001 17:19:35.328868 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-788d856b94-q2j77_15d016d4-52ca-4f58-8ccc-388d070c739c/kube-rbac-proxy/0.log" Oct 01 17:19:35 crc kubenswrapper[4869]: I1001 17:19:35.391934 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-788d856b94-q2j77_15d016d4-52ca-4f58-8ccc-388d070c739c/manager/0.log" Oct 01 17:19:35 crc kubenswrapper[4869]: I1001 17:19:35.441178 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-68d7bc5569-9wzmb_22cc979e-ec88-4ed2-bed9-fe4e685cae46/kube-rbac-proxy/0.log" Oct 01 17:19:35 crc kubenswrapper[4869]: I1001 17:19:35.495885 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-68d7bc5569-9wzmb_22cc979e-ec88-4ed2-bed9-fe4e685cae46/manager/0.log" Oct 01 17:19:50 crc kubenswrapper[4869]: I1001 17:19:50.643463 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-vkxf8_2ec0b99f-cb20-410b-8142-a3d046ed6578/control-plane-machine-set-operator/0.log" Oct 01 17:19:50 crc kubenswrapper[4869]: I1001 17:19:50.820033 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-jdrcg_2b6c50d9-ffec-4ecb-914e-c683a2d3b9e4/kube-rbac-proxy/0.log" Oct 01 17:19:50 crc kubenswrapper[4869]: I1001 17:19:50.871143 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-jdrcg_2b6c50d9-ffec-4ecb-914e-c683a2d3b9e4/machine-api-operator/0.log" Oct 01 17:20:02 crc kubenswrapper[4869]: I1001 17:20:02.683122 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-s86t5_d8389967-5803-4508-8abf-572bb2024d84/cert-manager-controller/0.log" Oct 01 17:20:02 crc 
kubenswrapper[4869]: I1001 17:20:02.852780 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-zjw2v_84af5e11-443d-4eae-b1cb-70397019f8f5/cert-manager-cainjector/0.log" Oct 01 17:20:02 crc kubenswrapper[4869]: I1001 17:20:02.896574 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-jpszr_ccc7b391-9817-440d-a687-496243444ae9/cert-manager-webhook/0.log" Oct 01 17:20:13 crc kubenswrapper[4869]: I1001 17:20:13.354338 4869 patch_prober.go:28] interesting pod/machine-config-daemon-c86m8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 17:20:13 crc kubenswrapper[4869]: I1001 17:20:13.355035 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 17:20:14 crc kubenswrapper[4869]: I1001 17:20:14.413864 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-6b874cbd85-2mvc5_a8204861-b466-42de-bda3-448b67dc02f2/nmstate-console-plugin/0.log" Oct 01 17:20:14 crc kubenswrapper[4869]: I1001 17:20:14.591060 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-fdff9cb8d-n4x26_77a31948-6e77-47cd-b110-fa1af1087629/kube-rbac-proxy/0.log" Oct 01 17:20:14 crc kubenswrapper[4869]: I1001 17:20:14.617100 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-p56ll_fb760180-2040-4f8f-8a57-e8f2fdb6d1ed/nmstate-handler/0.log" Oct 01 17:20:14 crc kubenswrapper[4869]: I1001 17:20:14.632405 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-fdff9cb8d-n4x26_77a31948-6e77-47cd-b110-fa1af1087629/nmstate-metrics/0.log" Oct 01 17:20:14 crc kubenswrapper[4869]: I1001 17:20:14.826893 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-6cdbc54649-k6lhz_17e9486a-5271-47f0-9851-30b1c293f6c7/nmstate-webhook/0.log" Oct 01 17:20:14 crc kubenswrapper[4869]: I1001 17:20:14.827178 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-858ddd8f98-m2cf8_f9bf39e5-d129-4dc7-881e-3a312469e6f9/nmstate-operator/0.log" Oct 01 17:20:27 crc kubenswrapper[4869]: I1001 17:20:27.909608 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-68d546b9d8-l2jbk_18d835b6-473f-4ff7-9a54-bd4f280896c2/kube-rbac-proxy/0.log" Oct 01 17:20:27 crc kubenswrapper[4869]: I1001 17:20:27.972345 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-68d546b9d8-l2jbk_18d835b6-473f-4ff7-9a54-bd4f280896c2/controller/0.log" Oct 01 17:20:28 crc kubenswrapper[4869]: I1001 17:20:28.144092 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-8795b_e748d49d-c9ae-445f-87d7-311d7ef79b37/cp-frr-files/0.log" Oct 01 17:20:28 crc kubenswrapper[4869]: I1001 17:20:28.289054 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-8795b_e748d49d-c9ae-445f-87d7-311d7ef79b37/cp-metrics/0.log" Oct 01 17:20:28 crc 
kubenswrapper[4869]: I1001 17:20:28.342218 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-8795b_e748d49d-c9ae-445f-87d7-311d7ef79b37/cp-frr-files/0.log" Oct 01 17:20:28 crc kubenswrapper[4869]: I1001 17:20:28.355850 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-8795b_e748d49d-c9ae-445f-87d7-311d7ef79b37/cp-reloader/0.log" Oct 01 17:20:28 crc kubenswrapper[4869]: I1001 17:20:28.359401 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-8795b_e748d49d-c9ae-445f-87d7-311d7ef79b37/cp-reloader/0.log" Oct 01 17:20:28 crc kubenswrapper[4869]: I1001 17:20:28.562880 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-8795b_e748d49d-c9ae-445f-87d7-311d7ef79b37/cp-metrics/0.log" Oct 01 17:20:28 crc kubenswrapper[4869]: I1001 17:20:28.562946 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-8795b_e748d49d-c9ae-445f-87d7-311d7ef79b37/cp-frr-files/0.log" Oct 01 17:20:28 crc kubenswrapper[4869]: I1001 17:20:28.562968 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-8795b_e748d49d-c9ae-445f-87d7-311d7ef79b37/cp-reloader/0.log" Oct 01 17:20:28 crc kubenswrapper[4869]: I1001 17:20:28.625783 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-8795b_e748d49d-c9ae-445f-87d7-311d7ef79b37/cp-metrics/0.log" Oct 01 17:20:28 crc kubenswrapper[4869]: I1001 17:20:28.728670 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-8795b_e748d49d-c9ae-445f-87d7-311d7ef79b37/cp-frr-files/0.log" Oct 01 17:20:28 crc kubenswrapper[4869]: I1001 17:20:28.757955 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-8795b_e748d49d-c9ae-445f-87d7-311d7ef79b37/cp-reloader/0.log" Oct 01 17:20:28 crc kubenswrapper[4869]: I1001 17:20:28.794076 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-8795b_e748d49d-c9ae-445f-87d7-311d7ef79b37/cp-metrics/0.log" Oct 01 17:20:28 crc kubenswrapper[4869]: I1001 17:20:28.820230 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-8795b_e748d49d-c9ae-445f-87d7-311d7ef79b37/controller/0.log" Oct 01 17:20:29 crc kubenswrapper[4869]: I1001 17:20:29.010150 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-8795b_e748d49d-c9ae-445f-87d7-311d7ef79b37/kube-rbac-proxy/0.log" Oct 01 17:20:29 crc kubenswrapper[4869]: I1001 17:20:29.019663 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-8795b_e748d49d-c9ae-445f-87d7-311d7ef79b37/kube-rbac-proxy-frr/0.log" Oct 01 17:20:29 crc kubenswrapper[4869]: I1001 17:20:29.043295 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-8795b_e748d49d-c9ae-445f-87d7-311d7ef79b37/frr-metrics/0.log" Oct 01 17:20:29 crc kubenswrapper[4869]: I1001 17:20:29.253423 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-64bf5d555-w6xl4_517c59e1-4387-4182-8db2-374a1dd516e6/frr-k8s-webhook-server/0.log" Oct 01 17:20:29 crc kubenswrapper[4869]: I1001 17:20:29.305748 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-8795b_e748d49d-c9ae-445f-87d7-311d7ef79b37/frr/0.log" Oct 01 17:20:29 crc kubenswrapper[4869]: I1001 17:20:29.327525 4869 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_frr-k8s-8795b_e748d49d-c9ae-445f-87d7-311d7ef79b37/reloader/0.log" Oct 01 17:20:29 crc kubenswrapper[4869]: I1001 17:20:29.518770 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-869db94fcd-5ccjx_cff5eb0a-37a8-473e-94b2-384c18f64054/webhook-server/0.log" Oct 01 17:20:29 crc kubenswrapper[4869]: I1001 17:20:29.528920 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-ddc944bf4-hrp74_77b7849a-6b82-4c2c-a23b-f5dd31c16a9f/manager/0.log" Oct 01 17:20:29 crc kubenswrapper[4869]: I1001 17:20:29.694654 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-6gmkw_86377387-9ca6-4577-a44d-125364686f83/kube-rbac-proxy/0.log" Oct 01 17:20:29 crc kubenswrapper[4869]: I1001 17:20:29.926412 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-6gmkw_86377387-9ca6-4577-a44d-125364686f83/speaker/0.log" Oct 01 17:20:42 crc kubenswrapper[4869]: I1001 17:20:42.320239 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2tlppj_0025b8f5-fdce-434f-a9c6-393fd4c93273/util/0.log" Oct 01 17:20:42 crc kubenswrapper[4869]: I1001 17:20:42.500621 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2tlppj_0025b8f5-fdce-434f-a9c6-393fd4c93273/util/0.log" Oct 01 17:20:42 crc kubenswrapper[4869]: I1001 17:20:42.503936 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2tlppj_0025b8f5-fdce-434f-a9c6-393fd4c93273/pull/0.log" Oct 01 17:20:42 crc kubenswrapper[4869]: I1001 17:20:42.550363 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2tlppj_0025b8f5-fdce-434f-a9c6-393fd4c93273/pull/0.log" Oct 01 17:20:42 crc kubenswrapper[4869]: I1001 17:20:42.688692 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2tlppj_0025b8f5-fdce-434f-a9c6-393fd4c93273/util/0.log" Oct 01 17:20:42 crc kubenswrapper[4869]: I1001 17:20:42.720913 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2tlppj_0025b8f5-fdce-434f-a9c6-393fd4c93273/extract/0.log" Oct 01 17:20:42 crc kubenswrapper[4869]: I1001 17:20:42.729673 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2tlppj_0025b8f5-fdce-434f-a9c6-393fd4c93273/pull/0.log" Oct 01 17:20:42 crc kubenswrapper[4869]: I1001 17:20:42.891897 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcwzr4g_56c96162-675d-4e1a-9945-a0a0b16de0d2/util/0.log" Oct 01 17:20:43 crc kubenswrapper[4869]: I1001 17:20:43.022522 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcwzr4g_56c96162-675d-4e1a-9945-a0a0b16de0d2/util/0.log" Oct 01 17:20:43 crc kubenswrapper[4869]: I1001 17:20:43.037785 4869 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcwzr4g_56c96162-675d-4e1a-9945-a0a0b16de0d2/pull/0.log" Oct 01 17:20:43 crc kubenswrapper[4869]: I1001 17:20:43.077991 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcwzr4g_56c96162-675d-4e1a-9945-a0a0b16de0d2/pull/0.log" Oct 01 17:20:43 crc kubenswrapper[4869]: I1001 17:20:43.250061 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcwzr4g_56c96162-675d-4e1a-9945-a0a0b16de0d2/extract/0.log" Oct 01 17:20:43 crc kubenswrapper[4869]: I1001 17:20:43.270965 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcwzr4g_56c96162-675d-4e1a-9945-a0a0b16de0d2/util/0.log" Oct 01 17:20:43 crc kubenswrapper[4869]: I1001 17:20:43.353625 4869 patch_prober.go:28] interesting pod/machine-config-daemon-c86m8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 17:20:43 crc kubenswrapper[4869]: I1001 17:20:43.353670 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 17:20:43 crc kubenswrapper[4869]: I1001 17:20:43.354421 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcwzr4g_56c96162-675d-4e1a-9945-a0a0b16de0d2/pull/0.log" Oct 01 17:20:43 crc kubenswrapper[4869]: I1001 17:20:43.421614 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-nvb7t_12b56197-83b7-4f2e-a7f3-d3b304b7c2fa/extract-utilities/0.log" Oct 01 17:20:43 crc kubenswrapper[4869]: I1001 17:20:43.637372 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-nvb7t_12b56197-83b7-4f2e-a7f3-d3b304b7c2fa/extract-utilities/0.log" Oct 01 17:20:43 crc kubenswrapper[4869]: I1001 17:20:43.672184 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-nvb7t_12b56197-83b7-4f2e-a7f3-d3b304b7c2fa/extract-content/0.log" Oct 01 17:20:43 crc kubenswrapper[4869]: I1001 17:20:43.682587 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-nvb7t_12b56197-83b7-4f2e-a7f3-d3b304b7c2fa/extract-content/0.log" Oct 01 17:20:43 crc kubenswrapper[4869]: I1001 17:20:43.852734 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-nvb7t_12b56197-83b7-4f2e-a7f3-d3b304b7c2fa/extract-content/0.log" Oct 01 17:20:43 crc kubenswrapper[4869]: I1001 17:20:43.876225 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-nvb7t_12b56197-83b7-4f2e-a7f3-d3b304b7c2fa/extract-utilities/0.log" Oct 01 17:20:44 crc kubenswrapper[4869]: I1001 17:20:44.104534 4869 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_community-operators-clqzs_9bae054d-44c8-41e1-9383-128da7767e72/extract-utilities/0.log" Oct 01 17:20:44 crc kubenswrapper[4869]: I1001 17:20:44.363444 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-clqzs_9bae054d-44c8-41e1-9383-128da7767e72/extract-content/0.log" Oct 01 17:20:44 crc kubenswrapper[4869]: I1001 17:20:44.388522 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-clqzs_9bae054d-44c8-41e1-9383-128da7767e72/extract-content/0.log" Oct 01 17:20:44 crc kubenswrapper[4869]: I1001 17:20:44.413246 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-nvb7t_12b56197-83b7-4f2e-a7f3-d3b304b7c2fa/registry-server/0.log" Oct 01 17:20:44 crc kubenswrapper[4869]: I1001 17:20:44.423957 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-clqzs_9bae054d-44c8-41e1-9383-128da7767e72/extract-utilities/0.log" Oct 01 17:20:44 crc kubenswrapper[4869]: I1001 17:20:44.642070 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-clqzs_9bae054d-44c8-41e1-9383-128da7767e72/extract-utilities/0.log" Oct 01 17:20:44 crc kubenswrapper[4869]: I1001 17:20:44.656036 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-clqzs_9bae054d-44c8-41e1-9383-128da7767e72/extract-content/0.log" Oct 01 17:20:44 crc kubenswrapper[4869]: I1001 17:20:44.836431 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h5q5b_7e6aa79a-0f81-43b0-b0e7-61f08276a955/util/0.log" Oct 01 17:20:45 crc kubenswrapper[4869]: I1001 17:20:45.058221 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h5q5b_7e6aa79a-0f81-43b0-b0e7-61f08276a955/pull/0.log" Oct 01 17:20:45 crc kubenswrapper[4869]: I1001 17:20:45.091104 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h5q5b_7e6aa79a-0f81-43b0-b0e7-61f08276a955/pull/0.log" Oct 01 17:20:45 crc kubenswrapper[4869]: I1001 17:20:45.135942 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h5q5b_7e6aa79a-0f81-43b0-b0e7-61f08276a955/util/0.log" Oct 01 17:20:45 crc kubenswrapper[4869]: I1001 17:20:45.328159 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h5q5b_7e6aa79a-0f81-43b0-b0e7-61f08276a955/util/0.log" Oct 01 17:20:45 crc kubenswrapper[4869]: I1001 17:20:45.373900 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h5q5b_7e6aa79a-0f81-43b0-b0e7-61f08276a955/extract/0.log" Oct 01 17:20:45 crc kubenswrapper[4869]: I1001 17:20:45.389579 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h5q5b_7e6aa79a-0f81-43b0-b0e7-61f08276a955/pull/0.log" Oct 01 17:20:45 crc kubenswrapper[4869]: E1001 17:20:45.581645 4869 kubelet_pods.go:538] "Hostname for pod was too long, truncated it" 
podName="test-operator-logs-pod-horizontest-horizontest-tests-horizontest" hostnameMaxLen=63 truncatedHostname="test-operator-logs-pod-horizontest-horizontest-tests-horizontes" Oct 01 17:20:45 crc kubenswrapper[4869]: I1001 17:20:45.630596 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c6l949_2d7aa34b-5f2f-4a92-95d8-4a4778a4c3be/util/0.log" Oct 01 17:20:45 crc kubenswrapper[4869]: I1001 17:20:45.768091 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-clqzs_9bae054d-44c8-41e1-9383-128da7767e72/registry-server/0.log" Oct 01 17:20:45 crc kubenswrapper[4869]: I1001 17:20:45.795050 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c6l949_2d7aa34b-5f2f-4a92-95d8-4a4778a4c3be/pull/0.log" Oct 01 17:20:45 crc kubenswrapper[4869]: I1001 17:20:45.835458 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c6l949_2d7aa34b-5f2f-4a92-95d8-4a4778a4c3be/pull/0.log" Oct 01 17:20:45 crc kubenswrapper[4869]: I1001 17:20:45.841382 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c6l949_2d7aa34b-5f2f-4a92-95d8-4a4778a4c3be/util/0.log" Oct 01 17:20:46 crc kubenswrapper[4869]: I1001 17:20:46.011727 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c6l949_2d7aa34b-5f2f-4a92-95d8-4a4778a4c3be/pull/0.log" Oct 01 17:20:46 crc kubenswrapper[4869]: I1001 17:20:46.050319 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c6l949_2d7aa34b-5f2f-4a92-95d8-4a4778a4c3be/extract/0.log" Oct 01 17:20:46 crc kubenswrapper[4869]: I1001 17:20:46.061503 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c6l949_2d7aa34b-5f2f-4a92-95d8-4a4778a4c3be/util/0.log" Oct 01 17:20:46 crc kubenswrapper[4869]: I1001 17:20:46.099401 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-fqbbl_0674ba6e-99f1-494a-ab15-a852605f2d52/marketplace-operator/0.log" Oct 01 17:20:46 crc kubenswrapper[4869]: I1001 17:20:46.233544 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-zqdck_1ec94678-9587-4f84-b2ce-745d0321216b/extract-utilities/0.log" Oct 01 17:20:46 crc kubenswrapper[4869]: I1001 17:20:46.425053 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-zqdck_1ec94678-9587-4f84-b2ce-745d0321216b/extract-content/0.log" Oct 01 17:20:46 crc kubenswrapper[4869]: I1001 17:20:46.425104 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-zqdck_1ec94678-9587-4f84-b2ce-745d0321216b/extract-utilities/0.log" Oct 01 17:20:46 crc kubenswrapper[4869]: I1001 17:20:46.427381 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-zqdck_1ec94678-9587-4f84-b2ce-745d0321216b/extract-content/0.log" Oct 01 17:20:46 crc kubenswrapper[4869]: I1001 17:20:46.584737 4869 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_redhat-marketplace-zqdck_1ec94678-9587-4f84-b2ce-745d0321216b/extract-content/0.log" Oct 01 17:20:46 crc kubenswrapper[4869]: I1001 17:20:46.611740 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-zqdck_1ec94678-9587-4f84-b2ce-745d0321216b/extract-utilities/0.log" Oct 01 17:20:46 crc kubenswrapper[4869]: I1001 17:20:46.646444 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-f4m9s_894a5177-f062-4912-83bd-56783e2dcc11/extract-utilities/0.log" Oct 01 17:20:46 crc kubenswrapper[4869]: I1001 17:20:46.851642 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-f4m9s_894a5177-f062-4912-83bd-56783e2dcc11/extract-content/0.log" Oct 01 17:20:46 crc kubenswrapper[4869]: I1001 17:20:46.886679 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-zqdck_1ec94678-9587-4f84-b2ce-745d0321216b/registry-server/0.log" Oct 01 17:20:46 crc kubenswrapper[4869]: I1001 17:20:46.887427 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-f4m9s_894a5177-f062-4912-83bd-56783e2dcc11/extract-utilities/0.log" Oct 01 17:20:46 crc kubenswrapper[4869]: I1001 17:20:46.916504 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-f4m9s_894a5177-f062-4912-83bd-56783e2dcc11/extract-content/0.log" Oct 01 17:20:47 crc kubenswrapper[4869]: I1001 17:20:47.043691 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-f4m9s_894a5177-f062-4912-83bd-56783e2dcc11/extract-content/0.log" Oct 01 17:20:47 crc kubenswrapper[4869]: I1001 17:20:47.068189 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-f4m9s_894a5177-f062-4912-83bd-56783e2dcc11/extract-utilities/0.log" Oct 01 17:20:47 crc kubenswrapper[4869]: I1001 17:20:47.545368 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-f4m9s_894a5177-f062-4912-83bd-56783e2dcc11/registry-server/0.log" Oct 01 17:20:47 crc kubenswrapper[4869]: I1001 17:20:47.863993 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-sxfz2"] Oct 01 17:20:47 crc kubenswrapper[4869]: E1001 17:20:47.864520 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="71bab402-5ec0-4ea8-904f-0574481967ed" containerName="container-00" Oct 01 17:20:47 crc kubenswrapper[4869]: I1001 17:20:47.864541 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="71bab402-5ec0-4ea8-904f-0574481967ed" containerName="container-00" Oct 01 17:20:47 crc kubenswrapper[4869]: I1001 17:20:47.864738 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="71bab402-5ec0-4ea8-904f-0574481967ed" containerName="container-00" Oct 01 17:20:47 crc kubenswrapper[4869]: I1001 17:20:47.866170 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-sxfz2" Oct 01 17:20:47 crc kubenswrapper[4869]: I1001 17:20:47.876606 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-sxfz2"] Oct 01 17:20:47 crc kubenswrapper[4869]: I1001 17:20:47.908210 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wclrz\" (UniqueName: \"kubernetes.io/projected/bbc30cac-2bb4-4a17-8009-eff7e30369e7-kube-api-access-wclrz\") pod \"community-operators-sxfz2\" (UID: \"bbc30cac-2bb4-4a17-8009-eff7e30369e7\") " pod="openshift-marketplace/community-operators-sxfz2" Oct 01 17:20:47 crc kubenswrapper[4869]: I1001 17:20:47.908364 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bbc30cac-2bb4-4a17-8009-eff7e30369e7-utilities\") pod \"community-operators-sxfz2\" (UID: \"bbc30cac-2bb4-4a17-8009-eff7e30369e7\") " pod="openshift-marketplace/community-operators-sxfz2" Oct 01 17:20:47 crc kubenswrapper[4869]: I1001 17:20:47.908455 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bbc30cac-2bb4-4a17-8009-eff7e30369e7-catalog-content\") pod \"community-operators-sxfz2\" (UID: \"bbc30cac-2bb4-4a17-8009-eff7e30369e7\") " pod="openshift-marketplace/community-operators-sxfz2" Oct 01 17:20:48 crc kubenswrapper[4869]: I1001 17:20:48.010040 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wclrz\" (UniqueName: \"kubernetes.io/projected/bbc30cac-2bb4-4a17-8009-eff7e30369e7-kube-api-access-wclrz\") pod \"community-operators-sxfz2\" (UID: \"bbc30cac-2bb4-4a17-8009-eff7e30369e7\") " pod="openshift-marketplace/community-operators-sxfz2" Oct 01 17:20:48 crc kubenswrapper[4869]: I1001 17:20:48.010161 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bbc30cac-2bb4-4a17-8009-eff7e30369e7-utilities\") pod \"community-operators-sxfz2\" (UID: \"bbc30cac-2bb4-4a17-8009-eff7e30369e7\") " pod="openshift-marketplace/community-operators-sxfz2" Oct 01 17:20:48 crc kubenswrapper[4869]: I1001 17:20:48.010247 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bbc30cac-2bb4-4a17-8009-eff7e30369e7-catalog-content\") pod \"community-operators-sxfz2\" (UID: \"bbc30cac-2bb4-4a17-8009-eff7e30369e7\") " pod="openshift-marketplace/community-operators-sxfz2" Oct 01 17:20:48 crc kubenswrapper[4869]: I1001 17:20:48.010798 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bbc30cac-2bb4-4a17-8009-eff7e30369e7-catalog-content\") pod \"community-operators-sxfz2\" (UID: \"bbc30cac-2bb4-4a17-8009-eff7e30369e7\") " pod="openshift-marketplace/community-operators-sxfz2" Oct 01 17:20:48 crc kubenswrapper[4869]: I1001 17:20:48.011145 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bbc30cac-2bb4-4a17-8009-eff7e30369e7-utilities\") pod \"community-operators-sxfz2\" (UID: \"bbc30cac-2bb4-4a17-8009-eff7e30369e7\") " pod="openshift-marketplace/community-operators-sxfz2" Oct 01 17:20:48 crc kubenswrapper[4869]: I1001 17:20:48.040348 4869 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-wclrz\" (UniqueName: \"kubernetes.io/projected/bbc30cac-2bb4-4a17-8009-eff7e30369e7-kube-api-access-wclrz\") pod \"community-operators-sxfz2\" (UID: \"bbc30cac-2bb4-4a17-8009-eff7e30369e7\") " pod="openshift-marketplace/community-operators-sxfz2" Oct 01 17:20:48 crc kubenswrapper[4869]: I1001 17:20:48.205732 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-sxfz2" Oct 01 17:20:48 crc kubenswrapper[4869]: I1001 17:20:48.754417 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-sxfz2"] Oct 01 17:20:49 crc kubenswrapper[4869]: I1001 17:20:49.155227 4869 generic.go:334] "Generic (PLEG): container finished" podID="bbc30cac-2bb4-4a17-8009-eff7e30369e7" containerID="ac031b5b4d189a3ea8805e1ecb3310a47651c02f8aab2f64068a57a5767c085d" exitCode=0 Oct 01 17:20:49 crc kubenswrapper[4869]: I1001 17:20:49.155303 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-sxfz2" event={"ID":"bbc30cac-2bb4-4a17-8009-eff7e30369e7","Type":"ContainerDied","Data":"ac031b5b4d189a3ea8805e1ecb3310a47651c02f8aab2f64068a57a5767c085d"} Oct 01 17:20:49 crc kubenswrapper[4869]: I1001 17:20:49.155521 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-sxfz2" event={"ID":"bbc30cac-2bb4-4a17-8009-eff7e30369e7","Type":"ContainerStarted","Data":"1431d5e36cc42a3780b05fbf7296bf747fc29526944e3642337210f22352adab"} Oct 01 17:20:49 crc kubenswrapper[4869]: I1001 17:20:49.157032 4869 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 01 17:20:51 crc kubenswrapper[4869]: I1001 17:20:51.228316 4869 generic.go:334] "Generic (PLEG): container finished" podID="bbc30cac-2bb4-4a17-8009-eff7e30369e7" containerID="601c017429731bd7a85952c482913b0f9675736d18814888be7b7c7c60b689ef" exitCode=0 Oct 01 17:20:51 crc kubenswrapper[4869]: I1001 17:20:51.231034 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-sxfz2" event={"ID":"bbc30cac-2bb4-4a17-8009-eff7e30369e7","Type":"ContainerDied","Data":"601c017429731bd7a85952c482913b0f9675736d18814888be7b7c7c60b689ef"} Oct 01 17:20:52 crc kubenswrapper[4869]: I1001 17:20:52.239616 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-sxfz2" event={"ID":"bbc30cac-2bb4-4a17-8009-eff7e30369e7","Type":"ContainerStarted","Data":"0162dcb9e7b9541c8117ed6698aa76872fd198e85d8bc4f9f0437ea1924dc26c"} Oct 01 17:20:52 crc kubenswrapper[4869]: I1001 17:20:52.261702 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-sxfz2" podStartSLOduration=2.6533509029999998 podStartE2EDuration="5.261680748s" podCreationTimestamp="2025-10-01 17:20:47 +0000 UTC" firstStartedPulling="2025-10-01 17:20:49.156813325 +0000 UTC m=+8158.303656441" lastFinishedPulling="2025-10-01 17:20:51.76514315 +0000 UTC m=+8160.911986286" observedRunningTime="2025-10-01 17:20:52.257912283 +0000 UTC m=+8161.404755399" watchObservedRunningTime="2025-10-01 17:20:52.261680748 +0000 UTC m=+8161.408523864" Oct 01 17:20:52 crc kubenswrapper[4869]: I1001 17:20:52.642830 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-72gqz"] Oct 01 17:20:52 crc kubenswrapper[4869]: I1001 17:20:52.646254 4869 util.go:30] "No sandbox for pod can be 
found. Need to start a new one" pod="openshift-marketplace/redhat-operators-72gqz" Oct 01 17:20:52 crc kubenswrapper[4869]: I1001 17:20:52.655747 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-72gqz"] Oct 01 17:20:52 crc kubenswrapper[4869]: I1001 17:20:52.710834 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7twgs\" (UniqueName: \"kubernetes.io/projected/f9daf550-5dc7-4617-8369-32cb5413bc73-kube-api-access-7twgs\") pod \"redhat-operators-72gqz\" (UID: \"f9daf550-5dc7-4617-8369-32cb5413bc73\") " pod="openshift-marketplace/redhat-operators-72gqz" Oct 01 17:20:52 crc kubenswrapper[4869]: I1001 17:20:52.710939 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f9daf550-5dc7-4617-8369-32cb5413bc73-utilities\") pod \"redhat-operators-72gqz\" (UID: \"f9daf550-5dc7-4617-8369-32cb5413bc73\") " pod="openshift-marketplace/redhat-operators-72gqz" Oct 01 17:20:52 crc kubenswrapper[4869]: I1001 17:20:52.711066 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f9daf550-5dc7-4617-8369-32cb5413bc73-catalog-content\") pod \"redhat-operators-72gqz\" (UID: \"f9daf550-5dc7-4617-8369-32cb5413bc73\") " pod="openshift-marketplace/redhat-operators-72gqz" Oct 01 17:20:52 crc kubenswrapper[4869]: I1001 17:20:52.812335 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f9daf550-5dc7-4617-8369-32cb5413bc73-catalog-content\") pod \"redhat-operators-72gqz\" (UID: \"f9daf550-5dc7-4617-8369-32cb5413bc73\") " pod="openshift-marketplace/redhat-operators-72gqz" Oct 01 17:20:52 crc kubenswrapper[4869]: I1001 17:20:52.812433 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7twgs\" (UniqueName: \"kubernetes.io/projected/f9daf550-5dc7-4617-8369-32cb5413bc73-kube-api-access-7twgs\") pod \"redhat-operators-72gqz\" (UID: \"f9daf550-5dc7-4617-8369-32cb5413bc73\") " pod="openshift-marketplace/redhat-operators-72gqz" Oct 01 17:20:52 crc kubenswrapper[4869]: I1001 17:20:52.812489 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f9daf550-5dc7-4617-8369-32cb5413bc73-utilities\") pod \"redhat-operators-72gqz\" (UID: \"f9daf550-5dc7-4617-8369-32cb5413bc73\") " pod="openshift-marketplace/redhat-operators-72gqz" Oct 01 17:20:52 crc kubenswrapper[4869]: I1001 17:20:52.812937 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f9daf550-5dc7-4617-8369-32cb5413bc73-utilities\") pod \"redhat-operators-72gqz\" (UID: \"f9daf550-5dc7-4617-8369-32cb5413bc73\") " pod="openshift-marketplace/redhat-operators-72gqz" Oct 01 17:20:52 crc kubenswrapper[4869]: I1001 17:20:52.813150 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f9daf550-5dc7-4617-8369-32cb5413bc73-catalog-content\") pod \"redhat-operators-72gqz\" (UID: \"f9daf550-5dc7-4617-8369-32cb5413bc73\") " pod="openshift-marketplace/redhat-operators-72gqz" Oct 01 17:20:52 crc kubenswrapper[4869]: I1001 17:20:52.837998 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-7twgs\" (UniqueName: \"kubernetes.io/projected/f9daf550-5dc7-4617-8369-32cb5413bc73-kube-api-access-7twgs\") pod \"redhat-operators-72gqz\" (UID: \"f9daf550-5dc7-4617-8369-32cb5413bc73\") " pod="openshift-marketplace/redhat-operators-72gqz" Oct 01 17:20:52 crc kubenswrapper[4869]: I1001 17:20:52.969234 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-72gqz" Oct 01 17:20:53 crc kubenswrapper[4869]: I1001 17:20:53.444402 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-72gqz"] Oct 01 17:20:54 crc kubenswrapper[4869]: I1001 17:20:54.286280 4869 generic.go:334] "Generic (PLEG): container finished" podID="f9daf550-5dc7-4617-8369-32cb5413bc73" containerID="9277107dc56ac064d45d0fc4c82b4321107929eb30a6ccfad57ee6aa6ddba999" exitCode=0 Oct 01 17:20:54 crc kubenswrapper[4869]: I1001 17:20:54.286359 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-72gqz" event={"ID":"f9daf550-5dc7-4617-8369-32cb5413bc73","Type":"ContainerDied","Data":"9277107dc56ac064d45d0fc4c82b4321107929eb30a6ccfad57ee6aa6ddba999"} Oct 01 17:20:54 crc kubenswrapper[4869]: I1001 17:20:54.286612 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-72gqz" event={"ID":"f9daf550-5dc7-4617-8369-32cb5413bc73","Type":"ContainerStarted","Data":"d6464cd7032bdaffb1eeef9d531bc50de2f8a444c27cefe69c783f85a03585ad"} Oct 01 17:20:58 crc kubenswrapper[4869]: I1001 17:20:58.206006 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-sxfz2" Oct 01 17:20:58 crc kubenswrapper[4869]: I1001 17:20:58.206600 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-sxfz2" Oct 01 17:20:58 crc kubenswrapper[4869]: I1001 17:20:58.261000 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-sxfz2" Oct 01 17:20:58 crc kubenswrapper[4869]: I1001 17:20:58.368970 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-sxfz2" Oct 01 17:21:00 crc kubenswrapper[4869]: I1001 17:21:00.039776 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-sxfz2"] Oct 01 17:21:00 crc kubenswrapper[4869]: I1001 17:21:00.343340 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-sxfz2" podUID="bbc30cac-2bb4-4a17-8009-eff7e30369e7" containerName="registry-server" containerID="cri-o://0162dcb9e7b9541c8117ed6698aa76872fd198e85d8bc4f9f0437ea1924dc26c" gracePeriod=2 Oct 01 17:21:00 crc kubenswrapper[4869]: E1001 17:21:00.876339 4869 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbbc30cac_2bb4_4a17_8009_eff7e30369e7.slice/crio-0162dcb9e7b9541c8117ed6698aa76872fd198e85d8bc4f9f0437ea1924dc26c.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbbc30cac_2bb4_4a17_8009_eff7e30369e7.slice/crio-conmon-0162dcb9e7b9541c8117ed6698aa76872fd198e85d8bc4f9f0437ea1924dc26c.scope\": RecentStats: unable to find data in memory cache]" Oct 01 17:21:01 crc kubenswrapper[4869]: I1001 17:21:01.353466 4869 
generic.go:334] "Generic (PLEG): container finished" podID="bbc30cac-2bb4-4a17-8009-eff7e30369e7" containerID="0162dcb9e7b9541c8117ed6698aa76872fd198e85d8bc4f9f0437ea1924dc26c" exitCode=0 Oct 01 17:21:01 crc kubenswrapper[4869]: I1001 17:21:01.353543 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-sxfz2" event={"ID":"bbc30cac-2bb4-4a17-8009-eff7e30369e7","Type":"ContainerDied","Data":"0162dcb9e7b9541c8117ed6698aa76872fd198e85d8bc4f9f0437ea1924dc26c"} Oct 01 17:21:04 crc kubenswrapper[4869]: I1001 17:21:04.026802 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-sxfz2" Oct 01 17:21:04 crc kubenswrapper[4869]: I1001 17:21:04.201434 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bbc30cac-2bb4-4a17-8009-eff7e30369e7-catalog-content\") pod \"bbc30cac-2bb4-4a17-8009-eff7e30369e7\" (UID: \"bbc30cac-2bb4-4a17-8009-eff7e30369e7\") " Oct 01 17:21:04 crc kubenswrapper[4869]: I1001 17:21:04.201570 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bbc30cac-2bb4-4a17-8009-eff7e30369e7-utilities\") pod \"bbc30cac-2bb4-4a17-8009-eff7e30369e7\" (UID: \"bbc30cac-2bb4-4a17-8009-eff7e30369e7\") " Oct 01 17:21:04 crc kubenswrapper[4869]: I1001 17:21:04.201598 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wclrz\" (UniqueName: \"kubernetes.io/projected/bbc30cac-2bb4-4a17-8009-eff7e30369e7-kube-api-access-wclrz\") pod \"bbc30cac-2bb4-4a17-8009-eff7e30369e7\" (UID: \"bbc30cac-2bb4-4a17-8009-eff7e30369e7\") " Oct 01 17:21:04 crc kubenswrapper[4869]: I1001 17:21:04.202451 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bbc30cac-2bb4-4a17-8009-eff7e30369e7-utilities" (OuterVolumeSpecName: "utilities") pod "bbc30cac-2bb4-4a17-8009-eff7e30369e7" (UID: "bbc30cac-2bb4-4a17-8009-eff7e30369e7"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 17:21:04 crc kubenswrapper[4869]: I1001 17:21:04.216175 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bbc30cac-2bb4-4a17-8009-eff7e30369e7-kube-api-access-wclrz" (OuterVolumeSpecName: "kube-api-access-wclrz") pod "bbc30cac-2bb4-4a17-8009-eff7e30369e7" (UID: "bbc30cac-2bb4-4a17-8009-eff7e30369e7"). InnerVolumeSpecName "kube-api-access-wclrz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 17:21:04 crc kubenswrapper[4869]: I1001 17:21:04.290318 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bbc30cac-2bb4-4a17-8009-eff7e30369e7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "bbc30cac-2bb4-4a17-8009-eff7e30369e7" (UID: "bbc30cac-2bb4-4a17-8009-eff7e30369e7"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 17:21:04 crc kubenswrapper[4869]: I1001 17:21:04.304305 4869 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bbc30cac-2bb4-4a17-8009-eff7e30369e7-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 17:21:04 crc kubenswrapper[4869]: I1001 17:21:04.304596 4869 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bbc30cac-2bb4-4a17-8009-eff7e30369e7-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 17:21:04 crc kubenswrapper[4869]: I1001 17:21:04.304666 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wclrz\" (UniqueName: \"kubernetes.io/projected/bbc30cac-2bb4-4a17-8009-eff7e30369e7-kube-api-access-wclrz\") on node \"crc\" DevicePath \"\"" Oct 01 17:21:04 crc kubenswrapper[4869]: I1001 17:21:04.394514 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-72gqz" event={"ID":"f9daf550-5dc7-4617-8369-32cb5413bc73","Type":"ContainerStarted","Data":"50366c32a3482956a301838e210022f5624b65c70c8e8ac4918db1399f574d7a"} Oct 01 17:21:04 crc kubenswrapper[4869]: I1001 17:21:04.398599 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-sxfz2" event={"ID":"bbc30cac-2bb4-4a17-8009-eff7e30369e7","Type":"ContainerDied","Data":"1431d5e36cc42a3780b05fbf7296bf747fc29526944e3642337210f22352adab"} Oct 01 17:21:04 crc kubenswrapper[4869]: I1001 17:21:04.398654 4869 scope.go:117] "RemoveContainer" containerID="0162dcb9e7b9541c8117ed6698aa76872fd198e85d8bc4f9f0437ea1924dc26c" Oct 01 17:21:04 crc kubenswrapper[4869]: I1001 17:21:04.398664 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-sxfz2" Oct 01 17:21:04 crc kubenswrapper[4869]: I1001 17:21:04.434293 4869 scope.go:117] "RemoveContainer" containerID="601c017429731bd7a85952c482913b0f9675736d18814888be7b7c7c60b689ef" Oct 01 17:21:04 crc kubenswrapper[4869]: I1001 17:21:04.447529 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-sxfz2"] Oct 01 17:21:04 crc kubenswrapper[4869]: I1001 17:21:04.460874 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-sxfz2"] Oct 01 17:21:04 crc kubenswrapper[4869]: I1001 17:21:04.497815 4869 scope.go:117] "RemoveContainer" containerID="ac031b5b4d189a3ea8805e1ecb3310a47651c02f8aab2f64068a57a5767c085d" Oct 01 17:21:05 crc kubenswrapper[4869]: I1001 17:21:05.426018 4869 generic.go:334] "Generic (PLEG): container finished" podID="f9daf550-5dc7-4617-8369-32cb5413bc73" containerID="50366c32a3482956a301838e210022f5624b65c70c8e8ac4918db1399f574d7a" exitCode=0 Oct 01 17:21:05 crc kubenswrapper[4869]: I1001 17:21:05.426117 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-72gqz" event={"ID":"f9daf550-5dc7-4617-8369-32cb5413bc73","Type":"ContainerDied","Data":"50366c32a3482956a301838e210022f5624b65c70c8e8ac4918db1399f574d7a"} Oct 01 17:21:05 crc kubenswrapper[4869]: I1001 17:21:05.591502 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bbc30cac-2bb4-4a17-8009-eff7e30369e7" path="/var/lib/kubelet/pods/bbc30cac-2bb4-4a17-8009-eff7e30369e7/volumes" Oct 01 17:21:08 crc kubenswrapper[4869]: I1001 17:21:08.456600 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-72gqz" event={"ID":"f9daf550-5dc7-4617-8369-32cb5413bc73","Type":"ContainerStarted","Data":"5a1c82b2300e50b408fb3ae6daed8dc6486ad51c44406bc936c7cafc89cc4564"} Oct 01 17:21:08 crc kubenswrapper[4869]: I1001 17:21:08.480513 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-72gqz" podStartSLOduration=2.997103121 podStartE2EDuration="16.480496934s" podCreationTimestamp="2025-10-01 17:20:52 +0000 UTC" firstStartedPulling="2025-10-01 17:20:54.290179211 +0000 UTC m=+8163.437022327" lastFinishedPulling="2025-10-01 17:21:07.773573004 +0000 UTC m=+8176.920416140" observedRunningTime="2025-10-01 17:21:08.472084281 +0000 UTC m=+8177.618927407" watchObservedRunningTime="2025-10-01 17:21:08.480496934 +0000 UTC m=+8177.627340050" Oct 01 17:21:12 crc kubenswrapper[4869]: I1001 17:21:12.970282 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-72gqz" Oct 01 17:21:12 crc kubenswrapper[4869]: I1001 17:21:12.970889 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-72gqz" Oct 01 17:21:13 crc kubenswrapper[4869]: I1001 17:21:13.354086 4869 patch_prober.go:28] interesting pod/machine-config-daemon-c86m8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 17:21:13 crc kubenswrapper[4869]: I1001 17:21:13.354144 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" 
containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 17:21:13 crc kubenswrapper[4869]: I1001 17:21:13.354189 4869 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" Oct 01 17:21:13 crc kubenswrapper[4869]: I1001 17:21:13.355136 4869 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"244ffa7b3483f330573b185ecbcd30b8ee2fd9a9414df3e81c3e654d5578f87a"} pod="openshift-machine-config-operator/machine-config-daemon-c86m8" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 01 17:21:13 crc kubenswrapper[4869]: I1001 17:21:13.355202 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" containerID="cri-o://244ffa7b3483f330573b185ecbcd30b8ee2fd9a9414df3e81c3e654d5578f87a" gracePeriod=600 Oct 01 17:21:13 crc kubenswrapper[4869]: I1001 17:21:13.527130 4869 generic.go:334] "Generic (PLEG): container finished" podID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerID="244ffa7b3483f330573b185ecbcd30b8ee2fd9a9414df3e81c3e654d5578f87a" exitCode=0 Oct 01 17:21:13 crc kubenswrapper[4869]: I1001 17:21:13.527375 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" event={"ID":"a4b64f7f-0b03-4f47-965b-9fde048b735c","Type":"ContainerDied","Data":"244ffa7b3483f330573b185ecbcd30b8ee2fd9a9414df3e81c3e654d5578f87a"} Oct 01 17:21:13 crc kubenswrapper[4869]: I1001 17:21:13.527486 4869 scope.go:117] "RemoveContainer" containerID="1c17c6f6d4b5b7ea03383059037e4e76a83710badfded0c48fa96960660db929" Oct 01 17:21:14 crc kubenswrapper[4869]: I1001 17:21:14.017550 4869 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-72gqz" podUID="f9daf550-5dc7-4617-8369-32cb5413bc73" containerName="registry-server" probeResult="failure" output=< Oct 01 17:21:14 crc kubenswrapper[4869]: timeout: failed to connect service ":50051" within 1s Oct 01 17:21:14 crc kubenswrapper[4869]: > Oct 01 17:21:14 crc kubenswrapper[4869]: I1001 17:21:14.539445 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" event={"ID":"a4b64f7f-0b03-4f47-965b-9fde048b735c","Type":"ContainerStarted","Data":"5c5081815445528475a924b08879ebd33f35107feee51b5609a739db117a7ead"} Oct 01 17:21:23 crc kubenswrapper[4869]: I1001 17:21:23.022049 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-72gqz" Oct 01 17:21:23 crc kubenswrapper[4869]: I1001 17:21:23.075870 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-72gqz" Oct 01 17:21:23 crc kubenswrapper[4869]: I1001 17:21:23.666919 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-72gqz"] Oct 01 17:21:23 crc kubenswrapper[4869]: I1001 17:21:23.844880 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-f4m9s"] Oct 01 17:21:23 crc kubenswrapper[4869]: I1001 17:21:23.845181 4869 kuberuntime_container.go:808] "Killing container 
with a grace period" pod="openshift-marketplace/redhat-operators-f4m9s" podUID="894a5177-f062-4912-83bd-56783e2dcc11" containerName="registry-server" containerID="cri-o://d3b119c4f1190532588772c23735ca061fd6af337a28d7650e54f66d441b3335" gracePeriod=2 Oct 01 17:21:24 crc kubenswrapper[4869]: I1001 17:21:24.326617 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-f4m9s" Oct 01 17:21:24 crc kubenswrapper[4869]: I1001 17:21:24.456945 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b8pct\" (UniqueName: \"kubernetes.io/projected/894a5177-f062-4912-83bd-56783e2dcc11-kube-api-access-b8pct\") pod \"894a5177-f062-4912-83bd-56783e2dcc11\" (UID: \"894a5177-f062-4912-83bd-56783e2dcc11\") " Oct 01 17:21:24 crc kubenswrapper[4869]: I1001 17:21:24.457072 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/894a5177-f062-4912-83bd-56783e2dcc11-catalog-content\") pod \"894a5177-f062-4912-83bd-56783e2dcc11\" (UID: \"894a5177-f062-4912-83bd-56783e2dcc11\") " Oct 01 17:21:24 crc kubenswrapper[4869]: I1001 17:21:24.457196 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/894a5177-f062-4912-83bd-56783e2dcc11-utilities\") pod \"894a5177-f062-4912-83bd-56783e2dcc11\" (UID: \"894a5177-f062-4912-83bd-56783e2dcc11\") " Oct 01 17:21:24 crc kubenswrapper[4869]: I1001 17:21:24.458318 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/894a5177-f062-4912-83bd-56783e2dcc11-utilities" (OuterVolumeSpecName: "utilities") pod "894a5177-f062-4912-83bd-56783e2dcc11" (UID: "894a5177-f062-4912-83bd-56783e2dcc11"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 17:21:24 crc kubenswrapper[4869]: I1001 17:21:24.465408 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/894a5177-f062-4912-83bd-56783e2dcc11-kube-api-access-b8pct" (OuterVolumeSpecName: "kube-api-access-b8pct") pod "894a5177-f062-4912-83bd-56783e2dcc11" (UID: "894a5177-f062-4912-83bd-56783e2dcc11"). InnerVolumeSpecName "kube-api-access-b8pct". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 17:21:24 crc kubenswrapper[4869]: I1001 17:21:24.560556 4869 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/894a5177-f062-4912-83bd-56783e2dcc11-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 17:21:24 crc kubenswrapper[4869]: I1001 17:21:24.560593 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b8pct\" (UniqueName: \"kubernetes.io/projected/894a5177-f062-4912-83bd-56783e2dcc11-kube-api-access-b8pct\") on node \"crc\" DevicePath \"\"" Oct 01 17:21:24 crc kubenswrapper[4869]: I1001 17:21:24.577802 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/894a5177-f062-4912-83bd-56783e2dcc11-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "894a5177-f062-4912-83bd-56783e2dcc11" (UID: "894a5177-f062-4912-83bd-56783e2dcc11"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 17:21:24 crc kubenswrapper[4869]: I1001 17:21:24.644699 4869 generic.go:334] "Generic (PLEG): container finished" podID="894a5177-f062-4912-83bd-56783e2dcc11" containerID="d3b119c4f1190532588772c23735ca061fd6af337a28d7650e54f66d441b3335" exitCode=0 Oct 01 17:21:24 crc kubenswrapper[4869]: I1001 17:21:24.644771 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-f4m9s" Oct 01 17:21:24 crc kubenswrapper[4869]: I1001 17:21:24.644792 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-f4m9s" event={"ID":"894a5177-f062-4912-83bd-56783e2dcc11","Type":"ContainerDied","Data":"d3b119c4f1190532588772c23735ca061fd6af337a28d7650e54f66d441b3335"} Oct 01 17:21:24 crc kubenswrapper[4869]: I1001 17:21:24.644862 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-f4m9s" event={"ID":"894a5177-f062-4912-83bd-56783e2dcc11","Type":"ContainerDied","Data":"518b0b01a067c9db4367534c5cd58744f76c60b8afab615f46f4b5c374925664"} Oct 01 17:21:24 crc kubenswrapper[4869]: I1001 17:21:24.644883 4869 scope.go:117] "RemoveContainer" containerID="d3b119c4f1190532588772c23735ca061fd6af337a28d7650e54f66d441b3335" Oct 01 17:21:24 crc kubenswrapper[4869]: I1001 17:21:24.662105 4869 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/894a5177-f062-4912-83bd-56783e2dcc11-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 17:21:24 crc kubenswrapper[4869]: I1001 17:21:24.688797 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-f4m9s"] Oct 01 17:21:24 crc kubenswrapper[4869]: I1001 17:21:24.697456 4869 scope.go:117] "RemoveContainer" containerID="5c9cab60d7cd09e8b0c7354ce76b7fb118292f1b252aa5c6375a60dfcc5263b2" Oct 01 17:21:24 crc kubenswrapper[4869]: I1001 17:21:24.697938 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-f4m9s"] Oct 01 17:21:24 crc kubenswrapper[4869]: I1001 17:21:24.740897 4869 scope.go:117] "RemoveContainer" containerID="b149bfb730f4d70df386265133db696fe33fa349f10350252e7c4c06b055a689" Oct 01 17:21:24 crc kubenswrapper[4869]: I1001 17:21:24.767386 4869 scope.go:117] "RemoveContainer" containerID="d3b119c4f1190532588772c23735ca061fd6af337a28d7650e54f66d441b3335" Oct 01 17:21:24 crc kubenswrapper[4869]: E1001 17:21:24.770497 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d3b119c4f1190532588772c23735ca061fd6af337a28d7650e54f66d441b3335\": container with ID starting with d3b119c4f1190532588772c23735ca061fd6af337a28d7650e54f66d441b3335 not found: ID does not exist" containerID="d3b119c4f1190532588772c23735ca061fd6af337a28d7650e54f66d441b3335" Oct 01 17:21:24 crc kubenswrapper[4869]: I1001 17:21:24.770545 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d3b119c4f1190532588772c23735ca061fd6af337a28d7650e54f66d441b3335"} err="failed to get container status \"d3b119c4f1190532588772c23735ca061fd6af337a28d7650e54f66d441b3335\": rpc error: code = NotFound desc = could not find container \"d3b119c4f1190532588772c23735ca061fd6af337a28d7650e54f66d441b3335\": container with ID starting with d3b119c4f1190532588772c23735ca061fd6af337a28d7650e54f66d441b3335 not found: ID does not exist" Oct 01 17:21:24 crc 
kubenswrapper[4869]: I1001 17:21:24.770574 4869 scope.go:117] "RemoveContainer" containerID="5c9cab60d7cd09e8b0c7354ce76b7fb118292f1b252aa5c6375a60dfcc5263b2" Oct 01 17:21:24 crc kubenswrapper[4869]: E1001 17:21:24.771003 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5c9cab60d7cd09e8b0c7354ce76b7fb118292f1b252aa5c6375a60dfcc5263b2\": container with ID starting with 5c9cab60d7cd09e8b0c7354ce76b7fb118292f1b252aa5c6375a60dfcc5263b2 not found: ID does not exist" containerID="5c9cab60d7cd09e8b0c7354ce76b7fb118292f1b252aa5c6375a60dfcc5263b2" Oct 01 17:21:24 crc kubenswrapper[4869]: I1001 17:21:24.771033 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5c9cab60d7cd09e8b0c7354ce76b7fb118292f1b252aa5c6375a60dfcc5263b2"} err="failed to get container status \"5c9cab60d7cd09e8b0c7354ce76b7fb118292f1b252aa5c6375a60dfcc5263b2\": rpc error: code = NotFound desc = could not find container \"5c9cab60d7cd09e8b0c7354ce76b7fb118292f1b252aa5c6375a60dfcc5263b2\": container with ID starting with 5c9cab60d7cd09e8b0c7354ce76b7fb118292f1b252aa5c6375a60dfcc5263b2 not found: ID does not exist" Oct 01 17:21:24 crc kubenswrapper[4869]: I1001 17:21:24.771055 4869 scope.go:117] "RemoveContainer" containerID="b149bfb730f4d70df386265133db696fe33fa349f10350252e7c4c06b055a689" Oct 01 17:21:24 crc kubenswrapper[4869]: E1001 17:21:24.771607 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b149bfb730f4d70df386265133db696fe33fa349f10350252e7c4c06b055a689\": container with ID starting with b149bfb730f4d70df386265133db696fe33fa349f10350252e7c4c06b055a689 not found: ID does not exist" containerID="b149bfb730f4d70df386265133db696fe33fa349f10350252e7c4c06b055a689" Oct 01 17:21:24 crc kubenswrapper[4869]: I1001 17:21:24.771638 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b149bfb730f4d70df386265133db696fe33fa349f10350252e7c4c06b055a689"} err="failed to get container status \"b149bfb730f4d70df386265133db696fe33fa349f10350252e7c4c06b055a689\": rpc error: code = NotFound desc = could not find container \"b149bfb730f4d70df386265133db696fe33fa349f10350252e7c4c06b055a689\": container with ID starting with b149bfb730f4d70df386265133db696fe33fa349f10350252e7c4c06b055a689 not found: ID does not exist" Oct 01 17:21:25 crc kubenswrapper[4869]: I1001 17:21:25.593765 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="894a5177-f062-4912-83bd-56783e2dcc11" path="/var/lib/kubelet/pods/894a5177-f062-4912-83bd-56783e2dcc11/volumes" Oct 01 17:21:57 crc kubenswrapper[4869]: E1001 17:21:57.584721 4869 kubelet_pods.go:538] "Hostname for pod was too long, truncated it" podName="test-operator-logs-pod-horizontest-horizontest-tests-horizontest" hostnameMaxLen=63 truncatedHostname="test-operator-logs-pod-horizontest-horizontest-tests-horizontes" Oct 01 17:23:13 crc kubenswrapper[4869]: I1001 17:23:13.354186 4869 patch_prober.go:28] interesting pod/machine-config-daemon-c86m8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 17:23:13 crc kubenswrapper[4869]: I1001 17:23:13.355022 4869 prober.go:107] "Probe failed" probeType="Liveness" 
pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 17:23:19 crc kubenswrapper[4869]: I1001 17:23:19.695030 4869 scope.go:117] "RemoveContainer" containerID="6a4e5c535aef702a50f6ff6ea7f2fbde2651a1b487c1f73a04bd6f313bdc74db" Oct 01 17:23:22 crc kubenswrapper[4869]: E1001 17:23:22.581004 4869 kubelet_pods.go:538] "Hostname for pod was too long, truncated it" podName="test-operator-logs-pod-horizontest-horizontest-tests-horizontest" hostnameMaxLen=63 truncatedHostname="test-operator-logs-pod-horizontest-horizontest-tests-horizontes" Oct 01 17:23:27 crc kubenswrapper[4869]: I1001 17:23:27.039189 4869 generic.go:334] "Generic (PLEG): container finished" podID="4d51b371-a81b-4c7f-9117-3694ccd6464d" containerID="e5991c9d3c8de729fd9ecdfa1a176de7b67f9a2c32ce532088efca33f472b3c3" exitCode=0 Oct 01 17:23:27 crc kubenswrapper[4869]: I1001 17:23:27.039313 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-w7d9w/must-gather-qp92d" event={"ID":"4d51b371-a81b-4c7f-9117-3694ccd6464d","Type":"ContainerDied","Data":"e5991c9d3c8de729fd9ecdfa1a176de7b67f9a2c32ce532088efca33f472b3c3"} Oct 01 17:23:27 crc kubenswrapper[4869]: I1001 17:23:27.040413 4869 scope.go:117] "RemoveContainer" containerID="e5991c9d3c8de729fd9ecdfa1a176de7b67f9a2c32ce532088efca33f472b3c3" Oct 01 17:23:27 crc kubenswrapper[4869]: I1001 17:23:27.547110 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-w7d9w_must-gather-qp92d_4d51b371-a81b-4c7f-9117-3694ccd6464d/gather/0.log" Oct 01 17:23:37 crc kubenswrapper[4869]: I1001 17:23:37.176608 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-w7d9w/must-gather-qp92d"] Oct 01 17:23:37 crc kubenswrapper[4869]: I1001 17:23:37.177482 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-w7d9w/must-gather-qp92d" podUID="4d51b371-a81b-4c7f-9117-3694ccd6464d" containerName="copy" containerID="cri-o://6625528abc4c76cdb12832e69cdf23f751deca26117085e8a20e629bfb741244" gracePeriod=2 Oct 01 17:23:37 crc kubenswrapper[4869]: I1001 17:23:37.185436 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-w7d9w/must-gather-qp92d"] Oct 01 17:23:37 crc kubenswrapper[4869]: I1001 17:23:37.724501 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-w7d9w_must-gather-qp92d_4d51b371-a81b-4c7f-9117-3694ccd6464d/copy/0.log" Oct 01 17:23:37 crc kubenswrapper[4869]: I1001 17:23:37.725464 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-w7d9w/must-gather-qp92d" Oct 01 17:23:37 crc kubenswrapper[4869]: I1001 17:23:37.828573 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-krdvx\" (UniqueName: \"kubernetes.io/projected/4d51b371-a81b-4c7f-9117-3694ccd6464d-kube-api-access-krdvx\") pod \"4d51b371-a81b-4c7f-9117-3694ccd6464d\" (UID: \"4d51b371-a81b-4c7f-9117-3694ccd6464d\") " Oct 01 17:23:37 crc kubenswrapper[4869]: I1001 17:23:37.828939 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/4d51b371-a81b-4c7f-9117-3694ccd6464d-must-gather-output\") pod \"4d51b371-a81b-4c7f-9117-3694ccd6464d\" (UID: \"4d51b371-a81b-4c7f-9117-3694ccd6464d\") " Oct 01 17:23:37 crc kubenswrapper[4869]: I1001 17:23:37.835046 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4d51b371-a81b-4c7f-9117-3694ccd6464d-kube-api-access-krdvx" (OuterVolumeSpecName: "kube-api-access-krdvx") pod "4d51b371-a81b-4c7f-9117-3694ccd6464d" (UID: "4d51b371-a81b-4c7f-9117-3694ccd6464d"). InnerVolumeSpecName "kube-api-access-krdvx". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 17:23:37 crc kubenswrapper[4869]: I1001 17:23:37.932038 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-krdvx\" (UniqueName: \"kubernetes.io/projected/4d51b371-a81b-4c7f-9117-3694ccd6464d-kube-api-access-krdvx\") on node \"crc\" DevicePath \"\"" Oct 01 17:23:38 crc kubenswrapper[4869]: I1001 17:23:38.059861 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4d51b371-a81b-4c7f-9117-3694ccd6464d-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "4d51b371-a81b-4c7f-9117-3694ccd6464d" (UID: "4d51b371-a81b-4c7f-9117-3694ccd6464d"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 17:23:38 crc kubenswrapper[4869]: I1001 17:23:38.137006 4869 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/4d51b371-a81b-4c7f-9117-3694ccd6464d-must-gather-output\") on node \"crc\" DevicePath \"\"" Oct 01 17:23:38 crc kubenswrapper[4869]: I1001 17:23:38.184347 4869 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-w7d9w_must-gather-qp92d_4d51b371-a81b-4c7f-9117-3694ccd6464d/copy/0.log" Oct 01 17:23:38 crc kubenswrapper[4869]: I1001 17:23:38.184708 4869 generic.go:334] "Generic (PLEG): container finished" podID="4d51b371-a81b-4c7f-9117-3694ccd6464d" containerID="6625528abc4c76cdb12832e69cdf23f751deca26117085e8a20e629bfb741244" exitCode=143 Oct 01 17:23:38 crc kubenswrapper[4869]: I1001 17:23:38.184756 4869 scope.go:117] "RemoveContainer" containerID="6625528abc4c76cdb12832e69cdf23f751deca26117085e8a20e629bfb741244" Oct 01 17:23:38 crc kubenswrapper[4869]: I1001 17:23:38.184833 4869 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-w7d9w/must-gather-qp92d" Oct 01 17:23:38 crc kubenswrapper[4869]: I1001 17:23:38.209055 4869 scope.go:117] "RemoveContainer" containerID="e5991c9d3c8de729fd9ecdfa1a176de7b67f9a2c32ce532088efca33f472b3c3" Oct 01 17:23:38 crc kubenswrapper[4869]: I1001 17:23:38.292026 4869 scope.go:117] "RemoveContainer" containerID="6625528abc4c76cdb12832e69cdf23f751deca26117085e8a20e629bfb741244" Oct 01 17:23:38 crc kubenswrapper[4869]: E1001 17:23:38.292587 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6625528abc4c76cdb12832e69cdf23f751deca26117085e8a20e629bfb741244\": container with ID starting with 6625528abc4c76cdb12832e69cdf23f751deca26117085e8a20e629bfb741244 not found: ID does not exist" containerID="6625528abc4c76cdb12832e69cdf23f751deca26117085e8a20e629bfb741244" Oct 01 17:23:38 crc kubenswrapper[4869]: I1001 17:23:38.292623 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6625528abc4c76cdb12832e69cdf23f751deca26117085e8a20e629bfb741244"} err="failed to get container status \"6625528abc4c76cdb12832e69cdf23f751deca26117085e8a20e629bfb741244\": rpc error: code = NotFound desc = could not find container \"6625528abc4c76cdb12832e69cdf23f751deca26117085e8a20e629bfb741244\": container with ID starting with 6625528abc4c76cdb12832e69cdf23f751deca26117085e8a20e629bfb741244 not found: ID does not exist" Oct 01 17:23:38 crc kubenswrapper[4869]: I1001 17:23:38.292646 4869 scope.go:117] "RemoveContainer" containerID="e5991c9d3c8de729fd9ecdfa1a176de7b67f9a2c32ce532088efca33f472b3c3" Oct 01 17:23:38 crc kubenswrapper[4869]: E1001 17:23:38.292958 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e5991c9d3c8de729fd9ecdfa1a176de7b67f9a2c32ce532088efca33f472b3c3\": container with ID starting with e5991c9d3c8de729fd9ecdfa1a176de7b67f9a2c32ce532088efca33f472b3c3 not found: ID does not exist" containerID="e5991c9d3c8de729fd9ecdfa1a176de7b67f9a2c32ce532088efca33f472b3c3" Oct 01 17:23:38 crc kubenswrapper[4869]: I1001 17:23:38.292982 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e5991c9d3c8de729fd9ecdfa1a176de7b67f9a2c32ce532088efca33f472b3c3"} err="failed to get container status \"e5991c9d3c8de729fd9ecdfa1a176de7b67f9a2c32ce532088efca33f472b3c3\": rpc error: code = NotFound desc = could not find container \"e5991c9d3c8de729fd9ecdfa1a176de7b67f9a2c32ce532088efca33f472b3c3\": container with ID starting with e5991c9d3c8de729fd9ecdfa1a176de7b67f9a2c32ce532088efca33f472b3c3 not found: ID does not exist" Oct 01 17:23:39 crc kubenswrapper[4869]: I1001 17:23:39.625920 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4d51b371-a81b-4c7f-9117-3694ccd6464d" path="/var/lib/kubelet/pods/4d51b371-a81b-4c7f-9117-3694ccd6464d/volumes" Oct 01 17:23:43 crc kubenswrapper[4869]: I1001 17:23:43.354313 4869 patch_prober.go:28] interesting pod/machine-config-daemon-c86m8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 17:23:43 crc kubenswrapper[4869]: I1001 17:23:43.354877 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" 
podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 17:23:47 crc kubenswrapper[4869]: I1001 17:23:47.028166 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-4ckm9"] Oct 01 17:23:47 crc kubenswrapper[4869]: E1001 17:23:47.030001 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="894a5177-f062-4912-83bd-56783e2dcc11" containerName="extract-content" Oct 01 17:23:47 crc kubenswrapper[4869]: I1001 17:23:47.030076 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="894a5177-f062-4912-83bd-56783e2dcc11" containerName="extract-content" Oct 01 17:23:47 crc kubenswrapper[4869]: E1001 17:23:47.030146 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="894a5177-f062-4912-83bd-56783e2dcc11" containerName="extract-utilities" Oct 01 17:23:47 crc kubenswrapper[4869]: I1001 17:23:47.030203 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="894a5177-f062-4912-83bd-56783e2dcc11" containerName="extract-utilities" Oct 01 17:23:47 crc kubenswrapper[4869]: E1001 17:23:47.030300 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bbc30cac-2bb4-4a17-8009-eff7e30369e7" containerName="extract-utilities" Oct 01 17:23:47 crc kubenswrapper[4869]: I1001 17:23:47.030384 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="bbc30cac-2bb4-4a17-8009-eff7e30369e7" containerName="extract-utilities" Oct 01 17:23:47 crc kubenswrapper[4869]: E1001 17:23:47.030471 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d51b371-a81b-4c7f-9117-3694ccd6464d" containerName="gather" Oct 01 17:23:47 crc kubenswrapper[4869]: I1001 17:23:47.030539 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d51b371-a81b-4c7f-9117-3694ccd6464d" containerName="gather" Oct 01 17:23:47 crc kubenswrapper[4869]: E1001 17:23:47.030623 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="894a5177-f062-4912-83bd-56783e2dcc11" containerName="registry-server" Oct 01 17:23:47 crc kubenswrapper[4869]: I1001 17:23:47.030691 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="894a5177-f062-4912-83bd-56783e2dcc11" containerName="registry-server" Oct 01 17:23:47 crc kubenswrapper[4869]: E1001 17:23:47.030770 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bbc30cac-2bb4-4a17-8009-eff7e30369e7" containerName="registry-server" Oct 01 17:23:47 crc kubenswrapper[4869]: I1001 17:23:47.030837 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="bbc30cac-2bb4-4a17-8009-eff7e30369e7" containerName="registry-server" Oct 01 17:23:47 crc kubenswrapper[4869]: E1001 17:23:47.030956 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bbc30cac-2bb4-4a17-8009-eff7e30369e7" containerName="extract-content" Oct 01 17:23:47 crc kubenswrapper[4869]: I1001 17:23:47.031027 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="bbc30cac-2bb4-4a17-8009-eff7e30369e7" containerName="extract-content" Oct 01 17:23:47 crc kubenswrapper[4869]: E1001 17:23:47.031105 4869 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d51b371-a81b-4c7f-9117-3694ccd6464d" containerName="copy" Oct 01 17:23:47 crc kubenswrapper[4869]: I1001 17:23:47.031175 4869 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d51b371-a81b-4c7f-9117-3694ccd6464d" containerName="copy" Oct 01 17:23:47 crc 
kubenswrapper[4869]: I1001 17:23:47.031495 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="bbc30cac-2bb4-4a17-8009-eff7e30369e7" containerName="registry-server" Oct 01 17:23:47 crc kubenswrapper[4869]: I1001 17:23:47.031607 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="4d51b371-a81b-4c7f-9117-3694ccd6464d" containerName="copy" Oct 01 17:23:47 crc kubenswrapper[4869]: I1001 17:23:47.031697 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="4d51b371-a81b-4c7f-9117-3694ccd6464d" containerName="gather" Oct 01 17:23:47 crc kubenswrapper[4869]: I1001 17:23:47.031787 4869 memory_manager.go:354] "RemoveStaleState removing state" podUID="894a5177-f062-4912-83bd-56783e2dcc11" containerName="registry-server" Oct 01 17:23:47 crc kubenswrapper[4869]: I1001 17:23:47.033763 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4ckm9" Oct 01 17:23:47 crc kubenswrapper[4869]: I1001 17:23:47.047563 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-4ckm9"] Oct 01 17:23:47 crc kubenswrapper[4869]: I1001 17:23:47.141877 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0f5084eb-59d8-49a8-aeda-b6a15562ff27-catalog-content\") pod \"redhat-marketplace-4ckm9\" (UID: \"0f5084eb-59d8-49a8-aeda-b6a15562ff27\") " pod="openshift-marketplace/redhat-marketplace-4ckm9" Oct 01 17:23:47 crc kubenswrapper[4869]: I1001 17:23:47.141942 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bh57p\" (UniqueName: \"kubernetes.io/projected/0f5084eb-59d8-49a8-aeda-b6a15562ff27-kube-api-access-bh57p\") pod \"redhat-marketplace-4ckm9\" (UID: \"0f5084eb-59d8-49a8-aeda-b6a15562ff27\") " pod="openshift-marketplace/redhat-marketplace-4ckm9" Oct 01 17:23:47 crc kubenswrapper[4869]: I1001 17:23:47.142015 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0f5084eb-59d8-49a8-aeda-b6a15562ff27-utilities\") pod \"redhat-marketplace-4ckm9\" (UID: \"0f5084eb-59d8-49a8-aeda-b6a15562ff27\") " pod="openshift-marketplace/redhat-marketplace-4ckm9" Oct 01 17:23:47 crc kubenswrapper[4869]: I1001 17:23:47.243371 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0f5084eb-59d8-49a8-aeda-b6a15562ff27-catalog-content\") pod \"redhat-marketplace-4ckm9\" (UID: \"0f5084eb-59d8-49a8-aeda-b6a15562ff27\") " pod="openshift-marketplace/redhat-marketplace-4ckm9" Oct 01 17:23:47 crc kubenswrapper[4869]: I1001 17:23:47.243624 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bh57p\" (UniqueName: \"kubernetes.io/projected/0f5084eb-59d8-49a8-aeda-b6a15562ff27-kube-api-access-bh57p\") pod \"redhat-marketplace-4ckm9\" (UID: \"0f5084eb-59d8-49a8-aeda-b6a15562ff27\") " pod="openshift-marketplace/redhat-marketplace-4ckm9" Oct 01 17:23:47 crc kubenswrapper[4869]: I1001 17:23:47.243689 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0f5084eb-59d8-49a8-aeda-b6a15562ff27-utilities\") pod \"redhat-marketplace-4ckm9\" (UID: \"0f5084eb-59d8-49a8-aeda-b6a15562ff27\") " 
pod="openshift-marketplace/redhat-marketplace-4ckm9" Oct 01 17:23:47 crc kubenswrapper[4869]: I1001 17:23:47.244405 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0f5084eb-59d8-49a8-aeda-b6a15562ff27-utilities\") pod \"redhat-marketplace-4ckm9\" (UID: \"0f5084eb-59d8-49a8-aeda-b6a15562ff27\") " pod="openshift-marketplace/redhat-marketplace-4ckm9" Oct 01 17:23:47 crc kubenswrapper[4869]: I1001 17:23:47.244461 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0f5084eb-59d8-49a8-aeda-b6a15562ff27-catalog-content\") pod \"redhat-marketplace-4ckm9\" (UID: \"0f5084eb-59d8-49a8-aeda-b6a15562ff27\") " pod="openshift-marketplace/redhat-marketplace-4ckm9" Oct 01 17:23:47 crc kubenswrapper[4869]: I1001 17:23:47.264173 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bh57p\" (UniqueName: \"kubernetes.io/projected/0f5084eb-59d8-49a8-aeda-b6a15562ff27-kube-api-access-bh57p\") pod \"redhat-marketplace-4ckm9\" (UID: \"0f5084eb-59d8-49a8-aeda-b6a15562ff27\") " pod="openshift-marketplace/redhat-marketplace-4ckm9" Oct 01 17:23:47 crc kubenswrapper[4869]: I1001 17:23:47.358302 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4ckm9" Oct 01 17:23:48 crc kubenswrapper[4869]: I1001 17:23:48.057150 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-4ckm9"] Oct 01 17:23:48 crc kubenswrapper[4869]: I1001 17:23:48.283981 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4ckm9" event={"ID":"0f5084eb-59d8-49a8-aeda-b6a15562ff27","Type":"ContainerStarted","Data":"ee0fef0bc56edefa284c3d8c0822b3bbb8201846628d69e50aebcebbba160942"} Oct 01 17:23:49 crc kubenswrapper[4869]: I1001 17:23:49.298391 4869 generic.go:334] "Generic (PLEG): container finished" podID="0f5084eb-59d8-49a8-aeda-b6a15562ff27" containerID="d32bb6f297a309c4c28fece6f6c50c214b38d43324f0a1fdb4db8db7d28c05e2" exitCode=0 Oct 01 17:23:49 crc kubenswrapper[4869]: I1001 17:23:49.299129 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4ckm9" event={"ID":"0f5084eb-59d8-49a8-aeda-b6a15562ff27","Type":"ContainerDied","Data":"d32bb6f297a309c4c28fece6f6c50c214b38d43324f0a1fdb4db8db7d28c05e2"} Oct 01 17:23:50 crc kubenswrapper[4869]: I1001 17:23:50.214076 4869 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-hb4xl"] Oct 01 17:23:50 crc kubenswrapper[4869]: I1001 17:23:50.217800 4869 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-hb4xl" Oct 01 17:23:50 crc kubenswrapper[4869]: I1001 17:23:50.270221 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-hb4xl"] Oct 01 17:23:50 crc kubenswrapper[4869]: I1001 17:23:50.311524 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/423c68ed-a313-46b3-a532-41a1e8175ac7-utilities\") pod \"certified-operators-hb4xl\" (UID: \"423c68ed-a313-46b3-a532-41a1e8175ac7\") " pod="openshift-marketplace/certified-operators-hb4xl" Oct 01 17:23:50 crc kubenswrapper[4869]: I1001 17:23:50.311618 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/423c68ed-a313-46b3-a532-41a1e8175ac7-catalog-content\") pod \"certified-operators-hb4xl\" (UID: \"423c68ed-a313-46b3-a532-41a1e8175ac7\") " pod="openshift-marketplace/certified-operators-hb4xl" Oct 01 17:23:50 crc kubenswrapper[4869]: I1001 17:23:50.311780 4869 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zn2wf\" (UniqueName: \"kubernetes.io/projected/423c68ed-a313-46b3-a532-41a1e8175ac7-kube-api-access-zn2wf\") pod \"certified-operators-hb4xl\" (UID: \"423c68ed-a313-46b3-a532-41a1e8175ac7\") " pod="openshift-marketplace/certified-operators-hb4xl" Oct 01 17:23:50 crc kubenswrapper[4869]: I1001 17:23:50.413777 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/423c68ed-a313-46b3-a532-41a1e8175ac7-utilities\") pod \"certified-operators-hb4xl\" (UID: \"423c68ed-a313-46b3-a532-41a1e8175ac7\") " pod="openshift-marketplace/certified-operators-hb4xl" Oct 01 17:23:50 crc kubenswrapper[4869]: I1001 17:23:50.414092 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/423c68ed-a313-46b3-a532-41a1e8175ac7-catalog-content\") pod \"certified-operators-hb4xl\" (UID: \"423c68ed-a313-46b3-a532-41a1e8175ac7\") " pod="openshift-marketplace/certified-operators-hb4xl" Oct 01 17:23:50 crc kubenswrapper[4869]: I1001 17:23:50.414358 4869 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zn2wf\" (UniqueName: \"kubernetes.io/projected/423c68ed-a313-46b3-a532-41a1e8175ac7-kube-api-access-zn2wf\") pod \"certified-operators-hb4xl\" (UID: \"423c68ed-a313-46b3-a532-41a1e8175ac7\") " pod="openshift-marketplace/certified-operators-hb4xl" Oct 01 17:23:50 crc kubenswrapper[4869]: I1001 17:23:50.414554 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/423c68ed-a313-46b3-a532-41a1e8175ac7-utilities\") pod \"certified-operators-hb4xl\" (UID: \"423c68ed-a313-46b3-a532-41a1e8175ac7\") " pod="openshift-marketplace/certified-operators-hb4xl" Oct 01 17:23:50 crc kubenswrapper[4869]: I1001 17:23:50.414589 4869 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/423c68ed-a313-46b3-a532-41a1e8175ac7-catalog-content\") pod \"certified-operators-hb4xl\" (UID: \"423c68ed-a313-46b3-a532-41a1e8175ac7\") " pod="openshift-marketplace/certified-operators-hb4xl" Oct 01 17:23:50 crc kubenswrapper[4869]: I1001 17:23:50.443136 4869 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-zn2wf\" (UniqueName: \"kubernetes.io/projected/423c68ed-a313-46b3-a532-41a1e8175ac7-kube-api-access-zn2wf\") pod \"certified-operators-hb4xl\" (UID: \"423c68ed-a313-46b3-a532-41a1e8175ac7\") " pod="openshift-marketplace/certified-operators-hb4xl" Oct 01 17:23:50 crc kubenswrapper[4869]: I1001 17:23:50.547700 4869 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-hb4xl" Oct 01 17:23:51 crc kubenswrapper[4869]: I1001 17:23:51.069865 4869 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-hb4xl"] Oct 01 17:23:51 crc kubenswrapper[4869]: I1001 17:23:51.318572 4869 generic.go:334] "Generic (PLEG): container finished" podID="423c68ed-a313-46b3-a532-41a1e8175ac7" containerID="af756133d43fb6644d22fae51c968ed5e9af5cc0a9322b3f1ee9c56f530790ca" exitCode=0 Oct 01 17:23:51 crc kubenswrapper[4869]: I1001 17:23:51.318619 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hb4xl" event={"ID":"423c68ed-a313-46b3-a532-41a1e8175ac7","Type":"ContainerDied","Data":"af756133d43fb6644d22fae51c968ed5e9af5cc0a9322b3f1ee9c56f530790ca"} Oct 01 17:23:51 crc kubenswrapper[4869]: I1001 17:23:51.318664 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hb4xl" event={"ID":"423c68ed-a313-46b3-a532-41a1e8175ac7","Type":"ContainerStarted","Data":"b8b3294bf729cd429f0a96df5636cf0eafa215006db2a1cc0fa699d4c451954c"} Oct 01 17:23:51 crc kubenswrapper[4869]: I1001 17:23:51.320799 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4ckm9" event={"ID":"0f5084eb-59d8-49a8-aeda-b6a15562ff27","Type":"ContainerStarted","Data":"9abb0c9c334417c214999e800099de84ae70851bef60025911c169488dc2b2d8"} Oct 01 17:23:52 crc kubenswrapper[4869]: I1001 17:23:52.332763 4869 generic.go:334] "Generic (PLEG): container finished" podID="0f5084eb-59d8-49a8-aeda-b6a15562ff27" containerID="9abb0c9c334417c214999e800099de84ae70851bef60025911c169488dc2b2d8" exitCode=0 Oct 01 17:23:52 crc kubenswrapper[4869]: I1001 17:23:52.333116 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4ckm9" event={"ID":"0f5084eb-59d8-49a8-aeda-b6a15562ff27","Type":"ContainerDied","Data":"9abb0c9c334417c214999e800099de84ae70851bef60025911c169488dc2b2d8"} Oct 01 17:23:53 crc kubenswrapper[4869]: I1001 17:23:53.342503 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hb4xl" event={"ID":"423c68ed-a313-46b3-a532-41a1e8175ac7","Type":"ContainerStarted","Data":"72cdb3da9cc4459fde02ab27ee1c8fb075dae9ab253c5a2527de8021cc4e313c"} Oct 01 17:23:53 crc kubenswrapper[4869]: I1001 17:23:53.345214 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4ckm9" event={"ID":"0f5084eb-59d8-49a8-aeda-b6a15562ff27","Type":"ContainerStarted","Data":"4338961ee878bf3d4934e91f30e28102ea009a37c0cd6918e65138ad85108bb6"} Oct 01 17:23:53 crc kubenswrapper[4869]: I1001 17:23:53.384059 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-4ckm9" podStartSLOduration=3.875234636 podStartE2EDuration="7.38403918s" podCreationTimestamp="2025-10-01 17:23:46 +0000 UTC" firstStartedPulling="2025-10-01 17:23:49.301745934 +0000 UTC m=+8338.448589050" lastFinishedPulling="2025-10-01 
17:23:52.810550478 +0000 UTC m=+8341.957393594" observedRunningTime="2025-10-01 17:23:53.379809563 +0000 UTC m=+8342.526652679" watchObservedRunningTime="2025-10-01 17:23:53.38403918 +0000 UTC m=+8342.530882296" Oct 01 17:23:54 crc kubenswrapper[4869]: I1001 17:23:54.362954 4869 generic.go:334] "Generic (PLEG): container finished" podID="423c68ed-a313-46b3-a532-41a1e8175ac7" containerID="72cdb3da9cc4459fde02ab27ee1c8fb075dae9ab253c5a2527de8021cc4e313c" exitCode=0 Oct 01 17:23:54 crc kubenswrapper[4869]: I1001 17:23:54.363371 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hb4xl" event={"ID":"423c68ed-a313-46b3-a532-41a1e8175ac7","Type":"ContainerDied","Data":"72cdb3da9cc4459fde02ab27ee1c8fb075dae9ab253c5a2527de8021cc4e313c"} Oct 01 17:23:55 crc kubenswrapper[4869]: I1001 17:23:55.381427 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hb4xl" event={"ID":"423c68ed-a313-46b3-a532-41a1e8175ac7","Type":"ContainerStarted","Data":"b5db4f9c3c19cf6a4eadeae070ed94b6f51e04bb389ffb85cd6a4386db95e2a9"} Oct 01 17:23:55 crc kubenswrapper[4869]: I1001 17:23:55.419347 4869 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-hb4xl" podStartSLOduration=1.940816726 podStartE2EDuration="5.419322385s" podCreationTimestamp="2025-10-01 17:23:50 +0000 UTC" firstStartedPulling="2025-10-01 17:23:51.321331403 +0000 UTC m=+8340.468174519" lastFinishedPulling="2025-10-01 17:23:54.799837032 +0000 UTC m=+8343.946680178" observedRunningTime="2025-10-01 17:23:55.411045426 +0000 UTC m=+8344.557888562" watchObservedRunningTime="2025-10-01 17:23:55.419322385 +0000 UTC m=+8344.566165531" Oct 01 17:23:57 crc kubenswrapper[4869]: I1001 17:23:57.359317 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-4ckm9" Oct 01 17:23:57 crc kubenswrapper[4869]: I1001 17:23:57.360412 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-4ckm9" Oct 01 17:23:57 crc kubenswrapper[4869]: I1001 17:23:57.418758 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-4ckm9" Oct 01 17:23:58 crc kubenswrapper[4869]: I1001 17:23:58.461672 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-4ckm9" Oct 01 17:23:59 crc kubenswrapper[4869]: I1001 17:23:59.208069 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-4ckm9"] Oct 01 17:24:00 crc kubenswrapper[4869]: I1001 17:24:00.423169 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-4ckm9" podUID="0f5084eb-59d8-49a8-aeda-b6a15562ff27" containerName="registry-server" containerID="cri-o://4338961ee878bf3d4934e91f30e28102ea009a37c0cd6918e65138ad85108bb6" gracePeriod=2 Oct 01 17:24:00 crc kubenswrapper[4869]: I1001 17:24:00.548686 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-hb4xl" Oct 01 17:24:00 crc kubenswrapper[4869]: I1001 17:24:00.548968 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-hb4xl" Oct 01 17:24:00 crc kubenswrapper[4869]: I1001 17:24:00.611996 4869 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="started" pod="openshift-marketplace/certified-operators-hb4xl" Oct 01 17:24:00 crc kubenswrapper[4869]: I1001 17:24:00.908055 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4ckm9" Oct 01 17:24:01 crc kubenswrapper[4869]: I1001 17:24:01.067302 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0f5084eb-59d8-49a8-aeda-b6a15562ff27-utilities\") pod \"0f5084eb-59d8-49a8-aeda-b6a15562ff27\" (UID: \"0f5084eb-59d8-49a8-aeda-b6a15562ff27\") " Oct 01 17:24:01 crc kubenswrapper[4869]: I1001 17:24:01.067984 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bh57p\" (UniqueName: \"kubernetes.io/projected/0f5084eb-59d8-49a8-aeda-b6a15562ff27-kube-api-access-bh57p\") pod \"0f5084eb-59d8-49a8-aeda-b6a15562ff27\" (UID: \"0f5084eb-59d8-49a8-aeda-b6a15562ff27\") " Oct 01 17:24:01 crc kubenswrapper[4869]: I1001 17:24:01.068215 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0f5084eb-59d8-49a8-aeda-b6a15562ff27-catalog-content\") pod \"0f5084eb-59d8-49a8-aeda-b6a15562ff27\" (UID: \"0f5084eb-59d8-49a8-aeda-b6a15562ff27\") " Oct 01 17:24:01 crc kubenswrapper[4869]: I1001 17:24:01.068750 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0f5084eb-59d8-49a8-aeda-b6a15562ff27-utilities" (OuterVolumeSpecName: "utilities") pod "0f5084eb-59d8-49a8-aeda-b6a15562ff27" (UID: "0f5084eb-59d8-49a8-aeda-b6a15562ff27"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 17:24:01 crc kubenswrapper[4869]: I1001 17:24:01.076688 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0f5084eb-59d8-49a8-aeda-b6a15562ff27-kube-api-access-bh57p" (OuterVolumeSpecName: "kube-api-access-bh57p") pod "0f5084eb-59d8-49a8-aeda-b6a15562ff27" (UID: "0f5084eb-59d8-49a8-aeda-b6a15562ff27"). InnerVolumeSpecName "kube-api-access-bh57p". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 17:24:01 crc kubenswrapper[4869]: I1001 17:24:01.083344 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0f5084eb-59d8-49a8-aeda-b6a15562ff27-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0f5084eb-59d8-49a8-aeda-b6a15562ff27" (UID: "0f5084eb-59d8-49a8-aeda-b6a15562ff27"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 17:24:01 crc kubenswrapper[4869]: I1001 17:24:01.172454 4869 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0f5084eb-59d8-49a8-aeda-b6a15562ff27-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 17:24:01 crc kubenswrapper[4869]: I1001 17:24:01.172506 4869 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0f5084eb-59d8-49a8-aeda-b6a15562ff27-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 17:24:01 crc kubenswrapper[4869]: I1001 17:24:01.172518 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bh57p\" (UniqueName: \"kubernetes.io/projected/0f5084eb-59d8-49a8-aeda-b6a15562ff27-kube-api-access-bh57p\") on node \"crc\" DevicePath \"\"" Oct 01 17:24:01 crc kubenswrapper[4869]: I1001 17:24:01.434379 4869 generic.go:334] "Generic (PLEG): container finished" podID="0f5084eb-59d8-49a8-aeda-b6a15562ff27" containerID="4338961ee878bf3d4934e91f30e28102ea009a37c0cd6918e65138ad85108bb6" exitCode=0 Oct 01 17:24:01 crc kubenswrapper[4869]: I1001 17:24:01.434469 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4ckm9" Oct 01 17:24:01 crc kubenswrapper[4869]: I1001 17:24:01.434513 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4ckm9" event={"ID":"0f5084eb-59d8-49a8-aeda-b6a15562ff27","Type":"ContainerDied","Data":"4338961ee878bf3d4934e91f30e28102ea009a37c0cd6918e65138ad85108bb6"} Oct 01 17:24:01 crc kubenswrapper[4869]: I1001 17:24:01.436571 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4ckm9" event={"ID":"0f5084eb-59d8-49a8-aeda-b6a15562ff27","Type":"ContainerDied","Data":"ee0fef0bc56edefa284c3d8c0822b3bbb8201846628d69e50aebcebbba160942"} Oct 01 17:24:01 crc kubenswrapper[4869]: I1001 17:24:01.436617 4869 scope.go:117] "RemoveContainer" containerID="4338961ee878bf3d4934e91f30e28102ea009a37c0cd6918e65138ad85108bb6" Oct 01 17:24:01 crc kubenswrapper[4869]: I1001 17:24:01.459406 4869 scope.go:117] "RemoveContainer" containerID="9abb0c9c334417c214999e800099de84ae70851bef60025911c169488dc2b2d8" Oct 01 17:24:01 crc kubenswrapper[4869]: I1001 17:24:01.501703 4869 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-hb4xl" Oct 01 17:24:01 crc kubenswrapper[4869]: I1001 17:24:01.504880 4869 scope.go:117] "RemoveContainer" containerID="d32bb6f297a309c4c28fece6f6c50c214b38d43324f0a1fdb4db8db7d28c05e2" Oct 01 17:24:01 crc kubenswrapper[4869]: I1001 17:24:01.505047 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-4ckm9"] Oct 01 17:24:01 crc kubenswrapper[4869]: I1001 17:24:01.517746 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-4ckm9"] Oct 01 17:24:01 crc kubenswrapper[4869]: I1001 17:24:01.558011 4869 scope.go:117] "RemoveContainer" containerID="4338961ee878bf3d4934e91f30e28102ea009a37c0cd6918e65138ad85108bb6" Oct 01 17:24:01 crc kubenswrapper[4869]: E1001 17:24:01.558728 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4338961ee878bf3d4934e91f30e28102ea009a37c0cd6918e65138ad85108bb6\": container with ID starting with 
4338961ee878bf3d4934e91f30e28102ea009a37c0cd6918e65138ad85108bb6 not found: ID does not exist" containerID="4338961ee878bf3d4934e91f30e28102ea009a37c0cd6918e65138ad85108bb6" Oct 01 17:24:01 crc kubenswrapper[4869]: I1001 17:24:01.558783 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4338961ee878bf3d4934e91f30e28102ea009a37c0cd6918e65138ad85108bb6"} err="failed to get container status \"4338961ee878bf3d4934e91f30e28102ea009a37c0cd6918e65138ad85108bb6\": rpc error: code = NotFound desc = could not find container \"4338961ee878bf3d4934e91f30e28102ea009a37c0cd6918e65138ad85108bb6\": container with ID starting with 4338961ee878bf3d4934e91f30e28102ea009a37c0cd6918e65138ad85108bb6 not found: ID does not exist" Oct 01 17:24:01 crc kubenswrapper[4869]: I1001 17:24:01.558816 4869 scope.go:117] "RemoveContainer" containerID="9abb0c9c334417c214999e800099de84ae70851bef60025911c169488dc2b2d8" Oct 01 17:24:01 crc kubenswrapper[4869]: E1001 17:24:01.559107 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9abb0c9c334417c214999e800099de84ae70851bef60025911c169488dc2b2d8\": container with ID starting with 9abb0c9c334417c214999e800099de84ae70851bef60025911c169488dc2b2d8 not found: ID does not exist" containerID="9abb0c9c334417c214999e800099de84ae70851bef60025911c169488dc2b2d8" Oct 01 17:24:01 crc kubenswrapper[4869]: I1001 17:24:01.559279 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9abb0c9c334417c214999e800099de84ae70851bef60025911c169488dc2b2d8"} err="failed to get container status \"9abb0c9c334417c214999e800099de84ae70851bef60025911c169488dc2b2d8\": rpc error: code = NotFound desc = could not find container \"9abb0c9c334417c214999e800099de84ae70851bef60025911c169488dc2b2d8\": container with ID starting with 9abb0c9c334417c214999e800099de84ae70851bef60025911c169488dc2b2d8 not found: ID does not exist" Oct 01 17:24:01 crc kubenswrapper[4869]: I1001 17:24:01.559377 4869 scope.go:117] "RemoveContainer" containerID="d32bb6f297a309c4c28fece6f6c50c214b38d43324f0a1fdb4db8db7d28c05e2" Oct 01 17:24:01 crc kubenswrapper[4869]: E1001 17:24:01.559946 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d32bb6f297a309c4c28fece6f6c50c214b38d43324f0a1fdb4db8db7d28c05e2\": container with ID starting with d32bb6f297a309c4c28fece6f6c50c214b38d43324f0a1fdb4db8db7d28c05e2 not found: ID does not exist" containerID="d32bb6f297a309c4c28fece6f6c50c214b38d43324f0a1fdb4db8db7d28c05e2" Oct 01 17:24:01 crc kubenswrapper[4869]: I1001 17:24:01.560034 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d32bb6f297a309c4c28fece6f6c50c214b38d43324f0a1fdb4db8db7d28c05e2"} err="failed to get container status \"d32bb6f297a309c4c28fece6f6c50c214b38d43324f0a1fdb4db8db7d28c05e2\": rpc error: code = NotFound desc = could not find container \"d32bb6f297a309c4c28fece6f6c50c214b38d43324f0a1fdb4db8db7d28c05e2\": container with ID starting with d32bb6f297a309c4c28fece6f6c50c214b38d43324f0a1fdb4db8db7d28c05e2 not found: ID does not exist" Oct 01 17:24:01 crc kubenswrapper[4869]: I1001 17:24:01.595885 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0f5084eb-59d8-49a8-aeda-b6a15562ff27" path="/var/lib/kubelet/pods/0f5084eb-59d8-49a8-aeda-b6a15562ff27/volumes" Oct 01 17:24:02 crc kubenswrapper[4869]: I1001 17:24:02.211340 
4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-hb4xl"] Oct 01 17:24:03 crc kubenswrapper[4869]: I1001 17:24:03.454533 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-hb4xl" podUID="423c68ed-a313-46b3-a532-41a1e8175ac7" containerName="registry-server" containerID="cri-o://b5db4f9c3c19cf6a4eadeae070ed94b6f51e04bb389ffb85cd6a4386db95e2a9" gracePeriod=2 Oct 01 17:24:03 crc kubenswrapper[4869]: I1001 17:24:03.965308 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-hb4xl" Oct 01 17:24:04 crc kubenswrapper[4869]: I1001 17:24:04.041420 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/423c68ed-a313-46b3-a532-41a1e8175ac7-catalog-content\") pod \"423c68ed-a313-46b3-a532-41a1e8175ac7\" (UID: \"423c68ed-a313-46b3-a532-41a1e8175ac7\") " Oct 01 17:24:04 crc kubenswrapper[4869]: I1001 17:24:04.041517 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zn2wf\" (UniqueName: \"kubernetes.io/projected/423c68ed-a313-46b3-a532-41a1e8175ac7-kube-api-access-zn2wf\") pod \"423c68ed-a313-46b3-a532-41a1e8175ac7\" (UID: \"423c68ed-a313-46b3-a532-41a1e8175ac7\") " Oct 01 17:24:04 crc kubenswrapper[4869]: I1001 17:24:04.041608 4869 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/423c68ed-a313-46b3-a532-41a1e8175ac7-utilities\") pod \"423c68ed-a313-46b3-a532-41a1e8175ac7\" (UID: \"423c68ed-a313-46b3-a532-41a1e8175ac7\") " Oct 01 17:24:04 crc kubenswrapper[4869]: I1001 17:24:04.042941 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/423c68ed-a313-46b3-a532-41a1e8175ac7-utilities" (OuterVolumeSpecName: "utilities") pod "423c68ed-a313-46b3-a532-41a1e8175ac7" (UID: "423c68ed-a313-46b3-a532-41a1e8175ac7"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 17:24:04 crc kubenswrapper[4869]: I1001 17:24:04.048499 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/423c68ed-a313-46b3-a532-41a1e8175ac7-kube-api-access-zn2wf" (OuterVolumeSpecName: "kube-api-access-zn2wf") pod "423c68ed-a313-46b3-a532-41a1e8175ac7" (UID: "423c68ed-a313-46b3-a532-41a1e8175ac7"). InnerVolumeSpecName "kube-api-access-zn2wf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 17:24:04 crc kubenswrapper[4869]: I1001 17:24:04.144037 4869 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zn2wf\" (UniqueName: \"kubernetes.io/projected/423c68ed-a313-46b3-a532-41a1e8175ac7-kube-api-access-zn2wf\") on node \"crc\" DevicePath \"\"" Oct 01 17:24:04 crc kubenswrapper[4869]: I1001 17:24:04.144073 4869 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/423c68ed-a313-46b3-a532-41a1e8175ac7-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 17:24:04 crc kubenswrapper[4869]: I1001 17:24:04.381478 4869 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/423c68ed-a313-46b3-a532-41a1e8175ac7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "423c68ed-a313-46b3-a532-41a1e8175ac7" (UID: "423c68ed-a313-46b3-a532-41a1e8175ac7"). 
InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 17:24:04 crc kubenswrapper[4869]: I1001 17:24:04.451524 4869 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/423c68ed-a313-46b3-a532-41a1e8175ac7-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 17:24:04 crc kubenswrapper[4869]: I1001 17:24:04.473316 4869 generic.go:334] "Generic (PLEG): container finished" podID="423c68ed-a313-46b3-a532-41a1e8175ac7" containerID="b5db4f9c3c19cf6a4eadeae070ed94b6f51e04bb389ffb85cd6a4386db95e2a9" exitCode=0 Oct 01 17:24:04 crc kubenswrapper[4869]: I1001 17:24:04.473422 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hb4xl" event={"ID":"423c68ed-a313-46b3-a532-41a1e8175ac7","Type":"ContainerDied","Data":"b5db4f9c3c19cf6a4eadeae070ed94b6f51e04bb389ffb85cd6a4386db95e2a9"} Oct 01 17:24:04 crc kubenswrapper[4869]: I1001 17:24:04.473842 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hb4xl" event={"ID":"423c68ed-a313-46b3-a532-41a1e8175ac7","Type":"ContainerDied","Data":"b8b3294bf729cd429f0a96df5636cf0eafa215006db2a1cc0fa699d4c451954c"} Oct 01 17:24:04 crc kubenswrapper[4869]: I1001 17:24:04.473891 4869 scope.go:117] "RemoveContainer" containerID="b5db4f9c3c19cf6a4eadeae070ed94b6f51e04bb389ffb85cd6a4386db95e2a9" Oct 01 17:24:04 crc kubenswrapper[4869]: I1001 17:24:04.473463 4869 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-hb4xl" Oct 01 17:24:04 crc kubenswrapper[4869]: I1001 17:24:04.509055 4869 scope.go:117] "RemoveContainer" containerID="72cdb3da9cc4459fde02ab27ee1c8fb075dae9ab253c5a2527de8021cc4e313c" Oct 01 17:24:04 crc kubenswrapper[4869]: I1001 17:24:04.541377 4869 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-hb4xl"] Oct 01 17:24:04 crc kubenswrapper[4869]: I1001 17:24:04.555708 4869 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-hb4xl"] Oct 01 17:24:04 crc kubenswrapper[4869]: I1001 17:24:04.563975 4869 scope.go:117] "RemoveContainer" containerID="af756133d43fb6644d22fae51c968ed5e9af5cc0a9322b3f1ee9c56f530790ca" Oct 01 17:24:04 crc kubenswrapper[4869]: I1001 17:24:04.611696 4869 scope.go:117] "RemoveContainer" containerID="b5db4f9c3c19cf6a4eadeae070ed94b6f51e04bb389ffb85cd6a4386db95e2a9" Oct 01 17:24:04 crc kubenswrapper[4869]: E1001 17:24:04.612203 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b5db4f9c3c19cf6a4eadeae070ed94b6f51e04bb389ffb85cd6a4386db95e2a9\": container with ID starting with b5db4f9c3c19cf6a4eadeae070ed94b6f51e04bb389ffb85cd6a4386db95e2a9 not found: ID does not exist" containerID="b5db4f9c3c19cf6a4eadeae070ed94b6f51e04bb389ffb85cd6a4386db95e2a9" Oct 01 17:24:04 crc kubenswrapper[4869]: I1001 17:24:04.612271 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b5db4f9c3c19cf6a4eadeae070ed94b6f51e04bb389ffb85cd6a4386db95e2a9"} err="failed to get container status \"b5db4f9c3c19cf6a4eadeae070ed94b6f51e04bb389ffb85cd6a4386db95e2a9\": rpc error: code = NotFound desc = could not find container \"b5db4f9c3c19cf6a4eadeae070ed94b6f51e04bb389ffb85cd6a4386db95e2a9\": container with ID starting with b5db4f9c3c19cf6a4eadeae070ed94b6f51e04bb389ffb85cd6a4386db95e2a9 
not found: ID does not exist" Oct 01 17:24:04 crc kubenswrapper[4869]: I1001 17:24:04.612306 4869 scope.go:117] "RemoveContainer" containerID="72cdb3da9cc4459fde02ab27ee1c8fb075dae9ab253c5a2527de8021cc4e313c" Oct 01 17:24:04 crc kubenswrapper[4869]: E1001 17:24:04.612833 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"72cdb3da9cc4459fde02ab27ee1c8fb075dae9ab253c5a2527de8021cc4e313c\": container with ID starting with 72cdb3da9cc4459fde02ab27ee1c8fb075dae9ab253c5a2527de8021cc4e313c not found: ID does not exist" containerID="72cdb3da9cc4459fde02ab27ee1c8fb075dae9ab253c5a2527de8021cc4e313c" Oct 01 17:24:04 crc kubenswrapper[4869]: I1001 17:24:04.612890 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"72cdb3da9cc4459fde02ab27ee1c8fb075dae9ab253c5a2527de8021cc4e313c"} err="failed to get container status \"72cdb3da9cc4459fde02ab27ee1c8fb075dae9ab253c5a2527de8021cc4e313c\": rpc error: code = NotFound desc = could not find container \"72cdb3da9cc4459fde02ab27ee1c8fb075dae9ab253c5a2527de8021cc4e313c\": container with ID starting with 72cdb3da9cc4459fde02ab27ee1c8fb075dae9ab253c5a2527de8021cc4e313c not found: ID does not exist" Oct 01 17:24:04 crc kubenswrapper[4869]: I1001 17:24:04.612919 4869 scope.go:117] "RemoveContainer" containerID="af756133d43fb6644d22fae51c968ed5e9af5cc0a9322b3f1ee9c56f530790ca" Oct 01 17:24:04 crc kubenswrapper[4869]: E1001 17:24:04.613469 4869 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"af756133d43fb6644d22fae51c968ed5e9af5cc0a9322b3f1ee9c56f530790ca\": container with ID starting with af756133d43fb6644d22fae51c968ed5e9af5cc0a9322b3f1ee9c56f530790ca not found: ID does not exist" containerID="af756133d43fb6644d22fae51c968ed5e9af5cc0a9322b3f1ee9c56f530790ca" Oct 01 17:24:04 crc kubenswrapper[4869]: I1001 17:24:04.613579 4869 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"af756133d43fb6644d22fae51c968ed5e9af5cc0a9322b3f1ee9c56f530790ca"} err="failed to get container status \"af756133d43fb6644d22fae51c968ed5e9af5cc0a9322b3f1ee9c56f530790ca\": rpc error: code = NotFound desc = could not find container \"af756133d43fb6644d22fae51c968ed5e9af5cc0a9322b3f1ee9c56f530790ca\": container with ID starting with af756133d43fb6644d22fae51c968ed5e9af5cc0a9322b3f1ee9c56f530790ca not found: ID does not exist" Oct 01 17:24:05 crc kubenswrapper[4869]: I1001 17:24:05.595905 4869 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="423c68ed-a313-46b3-a532-41a1e8175ac7" path="/var/lib/kubelet/pods/423c68ed-a313-46b3-a532-41a1e8175ac7/volumes" Oct 01 17:24:13 crc kubenswrapper[4869]: I1001 17:24:13.354305 4869 patch_prober.go:28] interesting pod/machine-config-daemon-c86m8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 17:24:13 crc kubenswrapper[4869]: I1001 17:24:13.354805 4869 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 17:24:13 crc kubenswrapper[4869]: I1001 
17:24:13.354854 4869 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" Oct 01 17:24:13 crc kubenswrapper[4869]: I1001 17:24:13.355612 4869 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"5c5081815445528475a924b08879ebd33f35107feee51b5609a739db117a7ead"} pod="openshift-machine-config-operator/machine-config-daemon-c86m8" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 01 17:24:13 crc kubenswrapper[4869]: I1001 17:24:13.355669 4869 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerName="machine-config-daemon" containerID="cri-o://5c5081815445528475a924b08879ebd33f35107feee51b5609a739db117a7ead" gracePeriod=600 Oct 01 17:24:13 crc kubenswrapper[4869]: E1001 17:24:13.476351 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 17:24:13 crc kubenswrapper[4869]: I1001 17:24:13.593725 4869 generic.go:334] "Generic (PLEG): container finished" podID="a4b64f7f-0b03-4f47-965b-9fde048b735c" containerID="5c5081815445528475a924b08879ebd33f35107feee51b5609a739db117a7ead" exitCode=0 Oct 01 17:24:13 crc kubenswrapper[4869]: I1001 17:24:13.597520 4869 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" event={"ID":"a4b64f7f-0b03-4f47-965b-9fde048b735c","Type":"ContainerDied","Data":"5c5081815445528475a924b08879ebd33f35107feee51b5609a739db117a7ead"} Oct 01 17:24:13 crc kubenswrapper[4869]: I1001 17:24:13.597613 4869 scope.go:117] "RemoveContainer" containerID="244ffa7b3483f330573b185ecbcd30b8ee2fd9a9414df3e81c3e654d5578f87a" Oct 01 17:24:13 crc kubenswrapper[4869]: I1001 17:24:13.598722 4869 scope.go:117] "RemoveContainer" containerID="5c5081815445528475a924b08879ebd33f35107feee51b5609a739db117a7ead" Oct 01 17:24:13 crc kubenswrapper[4869]: E1001 17:24:13.599386 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 17:24:26 crc kubenswrapper[4869]: I1001 17:24:26.581961 4869 scope.go:117] "RemoveContainer" containerID="5c5081815445528475a924b08879ebd33f35107feee51b5609a739db117a7ead" Oct 01 17:24:26 crc kubenswrapper[4869]: E1001 17:24:26.582980 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" 
podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 17:24:30 crc kubenswrapper[4869]: E1001 17:24:30.581672 4869 kubelet_pods.go:538] "Hostname for pod was too long, truncated it" podName="test-operator-logs-pod-horizontest-horizontest-tests-horizontest" hostnameMaxLen=63 truncatedHostname="test-operator-logs-pod-horizontest-horizontest-tests-horizontes" Oct 01 17:24:39 crc kubenswrapper[4869]: I1001 17:24:39.581233 4869 scope.go:117] "RemoveContainer" containerID="5c5081815445528475a924b08879ebd33f35107feee51b5609a739db117a7ead" Oct 01 17:24:39 crc kubenswrapper[4869]: E1001 17:24:39.582029 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 17:24:52 crc kubenswrapper[4869]: I1001 17:24:52.581988 4869 scope.go:117] "RemoveContainer" containerID="5c5081815445528475a924b08879ebd33f35107feee51b5609a739db117a7ead" Oct 01 17:24:52 crc kubenswrapper[4869]: E1001 17:24:52.582957 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 17:25:04 crc kubenswrapper[4869]: I1001 17:25:04.581374 4869 scope.go:117] "RemoveContainer" containerID="5c5081815445528475a924b08879ebd33f35107feee51b5609a739db117a7ead" Oct 01 17:25:04 crc kubenswrapper[4869]: E1001 17:25:04.582493 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 17:25:15 crc kubenswrapper[4869]: I1001 17:25:15.582050 4869 scope.go:117] "RemoveContainer" containerID="5c5081815445528475a924b08879ebd33f35107feee51b5609a739db117a7ead" Oct 01 17:25:15 crc kubenswrapper[4869]: E1001 17:25:15.583025 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 17:25:19 crc kubenswrapper[4869]: I1001 17:25:19.879870 4869 scope.go:117] "RemoveContainer" containerID="b1cd448762592f4977a06bad3b54c4973f720e19257b3bbd411c858b42dad5ec" Oct 01 17:25:30 crc kubenswrapper[4869]: I1001 17:25:30.581837 4869 scope.go:117] "RemoveContainer" containerID="5c5081815445528475a924b08879ebd33f35107feee51b5609a739db117a7ead" Oct 01 17:25:30 crc kubenswrapper[4869]: E1001 17:25:30.583618 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to 
\"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 17:25:41 crc kubenswrapper[4869]: E1001 17:25:41.588208 4869 kubelet_pods.go:538] "Hostname for pod was too long, truncated it" podName="test-operator-logs-pod-horizontest-horizontest-tests-horizontest" hostnameMaxLen=63 truncatedHostname="test-operator-logs-pod-horizontest-horizontest-tests-horizontes" Oct 01 17:25:44 crc kubenswrapper[4869]: I1001 17:25:44.581174 4869 scope.go:117] "RemoveContainer" containerID="5c5081815445528475a924b08879ebd33f35107feee51b5609a739db117a7ead" Oct 01 17:25:44 crc kubenswrapper[4869]: E1001 17:25:44.581954 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 17:25:59 crc kubenswrapper[4869]: I1001 17:25:59.582651 4869 scope.go:117] "RemoveContainer" containerID="5c5081815445528475a924b08879ebd33f35107feee51b5609a739db117a7ead" Oct 01 17:25:59 crc kubenswrapper[4869]: E1001 17:25:59.583710 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" Oct 01 17:26:13 crc kubenswrapper[4869]: I1001 17:26:13.581292 4869 scope.go:117] "RemoveContainer" containerID="5c5081815445528475a924b08879ebd33f35107feee51b5609a739db117a7ead" Oct 01 17:26:13 crc kubenswrapper[4869]: E1001 17:26:13.582313 4869 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-c86m8_openshift-machine-config-operator(a4b64f7f-0b03-4f47-965b-9fde048b735c)\"" pod="openshift-machine-config-operator/machine-config-daemon-c86m8" podUID="a4b64f7f-0b03-4f47-965b-9fde048b735c" var/home/core/zuul-output/logs/crc-cloud-workdir-crc-all-logs.tar.gz0000644000175000000000000000005515067262103024447 0ustar coreroot‹íÁ  ÷Om7 €7šÞ'(var/home/core/zuul-output/logs/crc-cloud/0000755000175000000000000000000015067262103017364 5ustar corerootvar/home/core/zuul-output/artifacts/0000755000175000017500000000000015067241106016507 5ustar corecorevar/home/core/zuul-output/docs/0000755000175000017500000000000015067241106015457 5ustar corecore